#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

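// Parameters of the main multithreaded stress test run by MainTest().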
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

static uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL:   strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE:   strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

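// RAII helper: measures the time from construction to destruction and
// accumulates it into the given min/sum/max duration references.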
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

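// Main multithreaded allocation stress test: each thread creates and frees
// random buffers and images according to config, then everything is torn down
// in the configured FreeOrder while timing and memory statistics are gathered
// into outResult.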
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for all threads to finish
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

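// Describes a single test resource: its VMA allocation, the buffer or image
// created in it, the creation info, and the first value of the data pattern
// written to its memory.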
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

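// Collection of host-visible, persistently mapped staging buffers that are
// reused between uploads and readbacks, limited to MAX_TOTAL_SIZE in total.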
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

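// Fills each destination buffer with its sequential uint32_t pattern
// (starting at m_StartValue) by writing to staging buffers and recording
// copy commands into the temporary command buffer.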
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

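// Reads the buffers back into staging memory and checks that each one still
// contains its expected sequential pattern.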
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

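// Creates a buffer in the given pool and fills its memory with a sequential
// pattern starting at a random m_StartValue, mapping it temporarily when it
// is not persistently mapped.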
static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

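// Creates a standalone buffer (or, when the hardcoded switch is changed, an
// image) of random size with CPU_TO_GPU memory and fills it with the
// sequential test pattern.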
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

1102{
1103 if(allocation.m_Buffer)
1104 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1105 else
1106 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1107}
1108
1109static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1110{
1111 for(size_t i = allocations.size(); i--; )
1112 DestroyAllocation(allocations[i]);
1113 allocations.clear();
1114}
1115
1116static void ValidateAllocationData(const AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1122 if(allocInfo.pMappedData == nullptr)
1123 {
1124 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001126 }
1127
1128 uint32_t value = allocation.m_StartValue;
1129 bool ok = true;
1130 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001131 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001132 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1133 {
1134 if(data[i] != value++)
1135 {
1136 ok = false;
1137 break;
1138 }
1139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001140 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001141
1142 if(allocInfo.pMappedData == nullptr)
1143 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1144}
1145
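// Destroys and recreates the VkBuffer/VkImage of an allocation and binds it
// at the allocation's current memory offset, as needed after defragmentation
// has moved the allocation to new memory.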
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

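// Convenience wrapper over vmaDefragment() that recreates the resource of
// every allocation reported as moved.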
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

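// CPU defragmentation tests on a custom pool: empty pool, fixed-size buffers
// defragmented all at once and one move at a time, and variable-size buffers
// with a subset treated as non-movable.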
void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}

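// Defragments the same scenario twice, once by passing the whole pool and
// once by passing an explicit list of its allocations, and expects identical
// statistics from both runs.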
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

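// Creates many allocations, frees a random 80% of them, then runs
// vmaDefragment() over the rest and validates their contents.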
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

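// GPU defragmentation: fills GPU_ONLY buffers through staging uploads,
// defragments the movable ones via vmaDefragmentationBegin/End with a command
// buffer, recreates moved resources, and validates the data afterwards.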
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001552static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001553{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001554 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001555 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001556
1557 std::vector<AllocInfo> allocations;
1558
1559 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001560 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1561 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001562 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001563 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1564 const size_t percentToLeave = 30;
1565 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001566 RandomNumberGenerator rand = { 234522 };
1567
1568 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001569
1570 VmaAllocationCreateInfo allocCreateInfo = {};
1571 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001572 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001573
1574 // Create all intended buffers.
1575 for(size_t i = 0; i < bufCount; ++i)
1576 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001577 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1578
1579 if(rand.Generate() % 100 < percentNonMovable)
1580 {
1581 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1582 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1583 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1584 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1585 }
1586 else
1587 {
1588 // Different usage just to see different color in output from VmaDumpVis.
1589 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1590 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1591 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1592 // And in JSON dump.
1593 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1594 }
1595
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001596 AllocInfo alloc;
1597 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1598 alloc.m_StartValue = rand.Generate();
1599 allocations.push_back(alloc);
1600 }
1601
1602 // Destroy some percentage of them.
1603 {
1604 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1605 for(size_t i = 0; i < buffersToDestroy; ++i)
1606 {
1607 const size_t index = rand.Generate() % allocations.size();
1608 allocations[index].Destroy();
1609 allocations.erase(allocations.begin() + index);
1610 }
1611 }
1612
1613 // Fill them with meaningful data.
1614 UploadGpuData(allocations.data(), allocations.size());
1615
Adam Sawickic6ede152018-11-16 17:04:14 +01001616 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001617 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001618 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001619
1620 // Defragment using GPU only.
1621 {
1622 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001623
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 std::vector<VmaAllocation> allocationPtrs;
1625 std::vector<VkBool32> allocationChanged;
1626 std::vector<size_t> allocationOriginalIndex;
1627
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001628 for(size_t i = 0; i < allocCount; ++i)
1629 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001630 VmaAllocationInfo allocInfo = {};
1631 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1632 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1633 {
1634 allocationPtrs.push_back(allocations[i].m_Allocation);
1635 allocationChanged.push_back(VK_FALSE);
1636 allocationOriginalIndex.push_back(i);
1637 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001638 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001639
1640 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001641
1642 BeginSingleTimeCommands();
1643
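        // GPU defragmentation records buffer copy commands into the command buffer passed in
        // defragInfo.commandBuffer below; EndSingleTimeCommands() then submits and finishes them
        // before the moved allocations are read or written again.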
1644 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001645 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001646 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001647 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001648 defragInfo.pAllocationsChanged = allocationChanged.data();
1649 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001650 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1651 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1652
1653 VmaDefragmentationStats stats = {};
1654 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1655 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1656 TEST(res >= VK_SUCCESS);
1657
1658 EndSingleTimeCommands();
1659
1660 vmaDefragmentationEnd(g_hAllocator, ctx);
1661
Adam Sawickic6ede152018-11-16 17:04:14 +01001662 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001663 {
1664 if(allocationChanged[i])
1665 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001666 const size_t origAllocIndex = allocationOriginalIndex[i];
1667 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001668 }
1669 }
1670
Adam Sawicki440307e2018-10-18 15:05:19 +02001671 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1672 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001673 }
1674
1675 ValidateGpuData(allocations.data(), allocations.size());
1676
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001677 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001678 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001679
1680 // Destroy all remaining buffers.
1681 for(size_t i = allocations.size(); i--; )
1682 {
1683 allocations[i].Destroy();
1684 }
Adam Sawicki05704002018-11-08 16:07:29 +01001685
1686 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001687}
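
// A minimal sketch (not called by the test suite) of the CPU path of the same defragmentation
// API: with no command buffer set, vmaDefragmentationBegin() moves HOST_VISIBLE allocations with
// memcpy instead of recording GPU copies. The parameters are placeholders for whatever allocations
// the caller tracks; buffers bound to moved allocations still have to be recreated and rebound
// afterwards, as done above for the GPU path.
static void DefragmentCpuSketch(VmaAllocation* allocs, uint32_t allocCount, VkBool32* outChanged)
{
    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.pAllocationsChanged = outChanged;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE; // No limit on bytes moved on the CPU.
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX; // No limit on the number of moved allocations.
    // defragInfo.commandBuffer stays VK_NULL_HANDLE, so no GPU copies are recorded.

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
    TEST(res >= VK_SUCCESS);
    vmaDefragmentationEnd(g_hAllocator, ctx);
}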
1688
Adam Sawickib8333fb2018-03-13 16:15:53 +01001689static void TestUserData()
1690{
1691 VkResult res;
1692
1693 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1694 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1695 bufCreateInfo.size = 0x10000;
1696
1697 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1698 {
1699 // Opaque pointer
1700 {
1701
1702 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1703 void* pointerToSomething = &res;
1704
1705 VmaAllocationCreateInfo allocCreateInfo = {};
1706 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1707 allocCreateInfo.pUserData = numberAsPointer;
1708 if(testIndex == 1)
1709 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1710
1711 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1712 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001713 TEST(res == VK_SUCCESS);
1714 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001715
1716 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001717 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001718
1719 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1720 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001721 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001722
1723 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1724 }
1725
1726 // String
1727 {
1728 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1729 const char* name2 = "2";
1730 const size_t name1Len = strlen(name1);
1731
1732 char* name1Buf = new char[name1Len + 1];
1733 strcpy_s(name1Buf, name1Len + 1, name1);
1734
1735 VmaAllocationCreateInfo allocCreateInfo = {};
1736 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1737 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1738 allocCreateInfo.pUserData = name1Buf;
1739 if(testIndex == 1)
1740 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1741
1742 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1743 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001744 TEST(res == VK_SUCCESS);
1745 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1746 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001747
1748 delete[] name1Buf;
1749
1750 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001751 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001752
1753 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1754 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001755 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001756
1757 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1758 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001759 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001760
1761 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1762 }
1763 }
1764}
1765
Adam Sawicki370ab182018-11-08 16:31:00 +01001766static void TestInvalidAllocations()
1767{
1768 VkResult res;
1769
1770 VmaAllocationCreateInfo allocCreateInfo = {};
1771 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1772
1773 // Try to allocate 0 bytes.
1774 {
1775 VkMemoryRequirements memReq = {};
1776 memReq.size = 0; // !!!
1777 memReq.alignment = 4;
1778 memReq.memoryTypeBits = UINT32_MAX;
1779 VmaAllocation alloc = VK_NULL_HANDLE;
1780 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1781 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1782 }
1783
1784 // Try to create buffer with size = 0.
1785 {
1786 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1787 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1788 bufCreateInfo.size = 0; // !!!
1789 VkBuffer buf = VK_NULL_HANDLE;
1790 VmaAllocation alloc = VK_NULL_HANDLE;
1791 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1792 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1793 }
1794
1795 // Try to create image with one dimension = 0.
1796 {
1797 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1798 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1799 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1800 imageCreateInfo.extent.width = 128;
1801 imageCreateInfo.extent.height = 0; // !!!
1802 imageCreateInfo.extent.depth = 1;
1803 imageCreateInfo.mipLevels = 1;
1804 imageCreateInfo.arrayLayers = 1;
1805 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1806 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1807 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1808 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1809 VkImage image = VK_NULL_HANDLE;
1810 VmaAllocation alloc = VK_NULL_HANDLE;
1811 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1812 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1813 }
1814}
1815
Adam Sawickib8333fb2018-03-13 16:15:53 +01001816static void TestMemoryRequirements()
1817{
1818 VkResult res;
1819 VkBuffer buf;
1820 VmaAllocation alloc;
1821 VmaAllocationInfo allocInfo;
1822
1823 const VkPhysicalDeviceMemoryProperties* memProps;
1824 vmaGetMemoryProperties(g_hAllocator, &memProps);
1825
1826 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1827 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1828 bufInfo.size = 128;
1829
1830 VmaAllocationCreateInfo allocCreateInfo = {};
1831
1832 // No requirements.
1833 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001834 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001835 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1836
1837 // Usage.
1838 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1839 allocCreateInfo.requiredFlags = 0;
1840 allocCreateInfo.preferredFlags = 0;
1841 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1842
1843 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001844 TEST(res == VK_SUCCESS);
1845 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001846 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1847
1848 // Required flags, preferred flags.
1849 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1850 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1851 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1852 allocCreateInfo.memoryTypeBits = 0;
1853
1854 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001855 TEST(res == VK_SUCCESS);
1856 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1857 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001858 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1859
1860 // memoryTypeBits.
1861 const uint32_t memType = allocInfo.memoryType;
1862 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1863 allocCreateInfo.requiredFlags = 0;
1864 allocCreateInfo.preferredFlags = 0;
1865 allocCreateInfo.memoryTypeBits = 1u << memType;
1866
1867 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001868 TEST(res == VK_SUCCESS);
1869 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001870 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1871
1872}
1873
1874static void TestBasics()
1875{
1876 VkResult res;
1877
1878 TestMemoryRequirements();
1879
1880 // Lost allocation
1881 {
1882 VmaAllocation alloc = VK_NULL_HANDLE;
1883 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001884 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001885
1886 VmaAllocationInfo allocInfo;
1887 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001888 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1889 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001890
1891 vmaFreeMemory(g_hAllocator, alloc);
1892 }
1893
1894 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1895 {
1896 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1897 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1898 bufCreateInfo.size = 128;
1899
1900 VmaAllocationCreateInfo allocCreateInfo = {};
1901 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1902 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1903
1904 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1905 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001906 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001907
1908 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1909
1910 // Same with DEDICATED_MEMORY.
1911 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1912
1913 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001914 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001915
1916 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1917 }
1918
1919 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001920
1921 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001922}
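
// A minimal sketch (illustration only, not called by TestBasics) of how a persistently mapped
// allocation like the MAPPED_BIT case above is typically used. CPU_ONLY memory is required to be
// HOST_VISIBLE, so pMappedData stays valid for the whole lifetime of the allocation; the explicit
// flush matters only on memory types that are not HOST_COHERENT.
static void WritePersistentlyMappedBufferSketch(const void* srcData, size_t srcSize)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    bufCreateInfo.size = srcSize;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // Mapped for its whole lifetime.

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

    memcpy(allocInfo.pMappedData, srcData, srcSize);
    vmaFlushAllocation(g_hAllocator, alloc, 0, VK_WHOLE_SIZE);

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}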
1923
1924void TestHeapSizeLimit()
1925{
1926 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1927 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1928
1929 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1930 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1931 {
1932 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1933 }
1934
1935 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1936 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1937 allocatorCreateInfo.device = g_hDevice;
1938 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1939
1940 VmaAllocator hAllocator;
1941 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001942 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001943
1944 struct Item
1945 {
1946 VkBuffer hBuf;
1947 VmaAllocation hAlloc;
1948 };
1949 std::vector<Item> items;
1950
1951 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1952 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1953
1954 // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
1955 VmaAllocationInfo ownAllocInfo;
1956 {
1957 VmaAllocationCreateInfo allocCreateInfo = {};
1958 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1959 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1960
1961 bufCreateInfo.size = BLOCK_SIZE / 2;
1962
1963 for(size_t i = 0; i < 2; ++i)
1964 {
1965 Item item;
1966 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001967 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001968 items.push_back(item);
1969 }
1970 }
1971
1972 // Create a pool to make sure further allocations are made from this memory type.
1973 VmaPoolCreateInfo poolCreateInfo = {};
1974 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1975 poolCreateInfo.blockSize = BLOCK_SIZE;
1976
1977 VmaPool hPool;
1978 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001979 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980
1981 // 2. Allocate normal buffers from all the remaining memory.
1982 {
1983 VmaAllocationCreateInfo allocCreateInfo = {};
1984 allocCreateInfo.pool = hPool;
1985
1986 bufCreateInfo.size = BLOCK_SIZE / 2;
1987
1988 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
1989 for(size_t i = 0; i < bufCount; ++i)
1990 {
1991 Item item;
1992 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001993 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001994 items.push_back(item);
1995 }
1996 }
1997
1998 // 3. Allocation of one more (even small) buffer should fail.
1999 {
2000 VmaAllocationCreateInfo allocCreateInfo = {};
2001 allocCreateInfo.pool = hPool;
2002
2003 bufCreateInfo.size = 128;
2004
2005 VkBuffer hBuf;
2006 VmaAllocation hAlloc;
2007 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002008 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002009 }
2010
2011 // Destroy everything.
2012 for(size_t i = items.size(); i--; )
2013 {
2014 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2015 }
2016
2017 vmaDestroyPool(hAllocator, hPool);
2018
2019 vmaDestroyAllocator(hAllocator);
2020}
2021
Adam Sawicki212a4a62018-06-14 15:44:45 +02002022#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002023static void TestDebugMargin()
2024{
2025 if(VMA_DEBUG_MARGIN == 0)
2026 {
2027 return;
2028 }
2029
2030 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002031 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002032
2033 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002034 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002035
2036 // Create a few buffers of different sizes.
2037 const size_t BUF_COUNT = 10;
2038 BufferInfo buffers[BUF_COUNT];
2039 VmaAllocationInfo allocInfo[BUF_COUNT];
2040 for(size_t i = 0; i < BUF_COUNT; ++i)
2041 {
2042 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002043 // Last one will be mapped.
2044 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002045
2046 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002047 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002048 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002049 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002050
2051 if(i == BUF_COUNT - 1)
2052 {
2053 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002054 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002055 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2056 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2057 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002058 }
2059
2060 // Check if their offsets preserve margin between them.
2061 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2062 {
2063 if(lhs.deviceMemory != rhs.deviceMemory)
2064 {
2065 return lhs.deviceMemory < rhs.deviceMemory;
2066 }
2067 return lhs.offset < rhs.offset;
2068 });
2069 for(size_t i = 1; i < BUF_COUNT; ++i)
2070 {
2071 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2072 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002073 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002074 }
2075 }
2076
Adam Sawicki212a4a62018-06-14 15:44:45 +02002077 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002078 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002079
Adam Sawicki73b16652018-06-11 16:39:25 +02002080 // Destroy all buffers.
2081 for(size_t i = BUF_COUNT; i--; )
2082 {
2083 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2084 }
2085}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002086#endif
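
// A hedged configuration sketch (not compiled here): the margins checked by TestDebugMargin()
// and vmaCheckCorruption() are enabled at compile time, before the library implementation is
// included, with something like:
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"
//
// VMA_DEBUG_DETECT_CORRUPTION fills the margins with a magic value that vmaCheckCorruption() validates.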
Adam Sawicki73b16652018-06-11 16:39:25 +02002087
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002088static void TestLinearAllocator()
2089{
2090 wprintf(L"Test linear allocator\n");
2091
2092 RandomNumberGenerator rand{645332};
2093
2094 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2095 sampleBufCreateInfo.size = 1024; // Whatever.
2096 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2097
2098 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2099 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2100
2101 VmaPoolCreateInfo poolCreateInfo = {};
2102 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002103 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002104
Adam Sawickiee082772018-06-20 17:45:49 +02002105 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002106 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2107 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2108
2109 VmaPool pool = nullptr;
2110 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002111 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002112
2113 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2114
2115 VmaAllocationCreateInfo allocCreateInfo = {};
2116 allocCreateInfo.pool = pool;
2117
2118 constexpr size_t maxBufCount = 100;
2119 std::vector<BufferInfo> bufInfo;
2120
2121 constexpr VkDeviceSize bufSizeMin = 16;
2122 constexpr VkDeviceSize bufSizeMax = 1024;
2123 VmaAllocationInfo allocInfo;
2124 VkDeviceSize prevOffset = 0;
2125
2126 // Test one-time free.
2127 for(size_t i = 0; i < 2; ++i)
2128 {
2129 // Allocate a number of buffers of varying sizes that surely fit into this block.
2130 VkDeviceSize bufSumSize = 0;
2131 for(size_t i = 0; i < maxBufCount; ++i)
2132 {
2133 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2134 BufferInfo newBufInfo;
2135 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2136 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002137 TEST(res == VK_SUCCESS);
2138 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002139 bufInfo.push_back(newBufInfo);
2140 prevOffset = allocInfo.offset;
2141 bufSumSize += bufCreateInfo.size;
2142 }
2143
2144 // Validate pool stats.
2145 VmaPoolStats stats;
2146 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002147 TEST(stats.size == poolCreateInfo.blockSize);
2148 TEST(stats.unusedSize <= poolCreateInfo.blockSize - bufSumSize); // Allocations may be padded up to their required alignment.
2149 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002150
2151 // Destroy the buffers in random order.
2152 while(!bufInfo.empty())
2153 {
2154 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2155 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2156 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2157 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2158 }
2159 }
2160
2161 // Test stack.
2162 {
2163 // Allocate a number of buffers of varying sizes that surely fit into this block.
2164 for(size_t i = 0; i < maxBufCount; ++i)
2165 {
2166 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2167 BufferInfo newBufInfo;
2168 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2169 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002170 TEST(res == VK_SUCCESS);
2171 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002172 bufInfo.push_back(newBufInfo);
2173 prevOffset = allocInfo.offset;
2174 }
2175
2176 // Destroy a few buffers from the top of the stack.
2177 for(size_t i = 0; i < maxBufCount / 5; ++i)
2178 {
2179 const BufferInfo& currBufInfo = bufInfo.back();
2180 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2181 bufInfo.pop_back();
2182 }
2183
2184 // Create some more
2185 for(size_t i = 0; i < maxBufCount / 5; ++i)
2186 {
2187 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2188 BufferInfo newBufInfo;
2189 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2190 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002191 TEST(res == VK_SUCCESS);
2192 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002193 bufInfo.push_back(newBufInfo);
2194 prevOffset = allocInfo.offset;
2195 }
2196
2197 // Destroy the buffers in reverse order.
2198 while(!bufInfo.empty())
2199 {
2200 const BufferInfo& currBufInfo = bufInfo.back();
2201 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2202 bufInfo.pop_back();
2203 }
2204 }
2205
Adam Sawickiee082772018-06-20 17:45:49 +02002206 // Test ring buffer.
2207 {
2208 // Allocate a number of buffers that surely fit into this block.
2209 bufCreateInfo.size = bufSizeMax;
2210 for(size_t i = 0; i < maxBufCount; ++i)
2211 {
2212 BufferInfo newBufInfo;
2213 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2214 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002215 TEST(res == VK_SUCCESS);
2216 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002217 bufInfo.push_back(newBufInfo);
2218 prevOffset = allocInfo.offset;
2219 }
2220
2221 // Free and allocate new buffers enough times to make sure we wrap around at least once.
2222 const size_t buffersPerIter = maxBufCount / 10 - 1;
2223 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2224 for(size_t iter = 0; iter < iterCount; ++iter)
2225 {
2226 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2227 {
2228 const BufferInfo& currBufInfo = bufInfo.front();
2229 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2230 bufInfo.erase(bufInfo.begin());
2231 }
2232 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2233 {
2234 BufferInfo newBufInfo;
2235 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2236 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002237 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002238 bufInfo.push_back(newBufInfo);
2239 }
2240 }
2241
2242 // Allocate buffers until we reach out-of-memory.
2243 uint32_t debugIndex = 0;
2244 while(res == VK_SUCCESS)
2245 {
2246 BufferInfo newBufInfo;
2247 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2248 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2249 if(res == VK_SUCCESS)
2250 {
2251 bufInfo.push_back(newBufInfo);
2252 }
2253 else
2254 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002255 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002256 }
2257 ++debugIndex;
2258 }
2259
2260 // Destroy the buffers in random order.
2261 while(!bufInfo.empty())
2262 {
2263 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2264 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2265 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2266 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2267 }
2268 }
2269
Adam Sawicki680b2252018-08-22 14:47:32 +02002270 // Test double stack.
2271 {
2272 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2273 VkDeviceSize prevOffsetLower = 0;
2274 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2275 for(size_t i = 0; i < maxBufCount; ++i)
2276 {
2277 const bool upperAddress = (i % 2) != 0;
2278 if(upperAddress)
2279 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2280 else
2281 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2282 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2283 BufferInfo newBufInfo;
2284 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2285 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002286 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002287 if(upperAddress)
2288 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002289 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002290 prevOffsetUpper = allocInfo.offset;
2291 }
2292 else
2293 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002294 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002295 prevOffsetLower = allocInfo.offset;
2296 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002297 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002298 bufInfo.push_back(newBufInfo);
2299 }
2300
2301 // Destroy a few buffers from the top of the stack.
2302 for(size_t i = 0; i < maxBufCount / 5; ++i)
2303 {
2304 const BufferInfo& currBufInfo = bufInfo.back();
2305 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2306 bufInfo.pop_back();
2307 }
2308
2309 // Create some more
2310 for(size_t i = 0; i < maxBufCount / 5; ++i)
2311 {
2312 const bool upperAddress = (i % 2) != 0;
2313 if(upperAddress)
2314 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2315 else
2316 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2317 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2318 BufferInfo newBufInfo;
2319 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2320 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002321 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002322 bufInfo.push_back(newBufInfo);
2323 }
2324
2325 // Destroy the buffers in reverse order.
2326 while(!bufInfo.empty())
2327 {
2328 const BufferInfo& currBufInfo = bufInfo.back();
2329 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2330 bufInfo.pop_back();
2331 }
2332
2333 // Create buffers on both sides until we run out of memory.
2334 prevOffsetLower = 0;
2335 prevOffsetUpper = poolCreateInfo.blockSize;
2336 res = VK_SUCCESS;
2337 for(size_t i = 0; res == VK_SUCCESS; ++i)
2338 {
2339 const bool upperAddress = (i % 2) != 0;
2340 if(upperAddress)
2341 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2342 else
2343 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2344 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2345 BufferInfo newBufInfo;
2346 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2347 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2348 if(res == VK_SUCCESS)
2349 {
2350 if(upperAddress)
2351 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002353 prevOffsetUpper = allocInfo.offset;
2354 }
2355 else
2356 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002357 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002358 prevOffsetLower = allocInfo.offset;
2359 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002360 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002361 bufInfo.push_back(newBufInfo);
2362 }
2363 }
2364
2365 // Destroy the buffers in random order.
2366 while(!bufInfo.empty())
2367 {
2368 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2369 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2370 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2371 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2372 }
2373
2374 // Create buffers on the upper side only, with constant size, until we run out of memory.
2375 prevOffsetUpper = poolCreateInfo.blockSize;
2376 res = VK_SUCCESS;
2377 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2378 bufCreateInfo.size = bufSizeMax;
2379 for(size_t i = 0; res == VK_SUCCESS; ++i)
2380 {
2381 BufferInfo newBufInfo;
2382 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2383 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2384 if(res == VK_SUCCESS)
2385 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002386 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002387 prevOffsetUpper = allocInfo.offset;
2388 bufInfo.push_back(newBufInfo);
2389 }
2390 }
2391
2392 // Destroy the buffers in reverse order.
2393 while(!bufInfo.empty())
2394 {
2395 const BufferInfo& currBufInfo = bufInfo.back();
2396 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2397 bufInfo.pop_back();
2398 }
2399 }
2400
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002401 // Test ring buffer with lost allocations.
2402 {
2403 // Allocate buffers until the pool is full.
2404 // Note the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
2405 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2406 res = VK_SUCCESS;
2407 for(size_t i = 0; res == VK_SUCCESS; ++i)
2408 {
2409 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2410
2411 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2412
2413 BufferInfo newBufInfo;
2414 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2415 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2416 if(res == VK_SUCCESS)
2417 bufInfo.push_back(newBufInfo);
2418 }
2419
2420 // Free first half of it.
2421 {
2422 const size_t buffersToDelete = bufInfo.size() / 2;
2423 for(size_t i = 0; i < buffersToDelete; ++i)
2424 {
2425 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2426 }
2427 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2428 }
2429
2430 // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002431 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002432 res = VK_SUCCESS;
2433 for(size_t i = 0; res == VK_SUCCESS; ++i)
2434 {
2435 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2436
2437 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2438
2439 BufferInfo newBufInfo;
2440 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2441 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2442 if(res == VK_SUCCESS)
2443 bufInfo.push_back(newBufInfo);
2444 }
2445
2446 VkDeviceSize firstNewOffset;
2447 {
2448 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2449
2450 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2451 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2452 bufCreateInfo.size = bufSizeMax;
2453
2454 BufferInfo newBufInfo;
2455 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2456 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002457 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002458 bufInfo.push_back(newBufInfo);
2459 firstNewOffset = allocInfo.offset;
2460
2461 // Make sure at least one buffer from the beginning became lost.
2462 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002463 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002464 }
2465
2466 // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around.
2467 size_t newCount = 1;
2468 for(;;)
2469 {
2470 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2471
2472 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2473
2474 BufferInfo newBufInfo;
2475 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2476 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002477 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002478 bufInfo.push_back(newBufInfo);
2479 ++newCount;
2480 if(allocInfo.offset < firstNewOffset)
2481 break;
2482 }
2483
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002484 // Delete buffers that are lost.
2485 for(size_t i = bufInfo.size(); i--; )
2486 {
2487 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2488 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2489 {
2490 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2491 bufInfo.erase(bufInfo.begin() + i);
2492 }
2493 }
2494
2495 // Test vmaMakePoolAllocationsLost
2496 {
2497 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2498
2499 size_t lostAllocCount = SIZE_MAX;
2500 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002501 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002502
2503 size_t realLostAllocCount = 0;
2504 for(size_t i = 0; i < bufInfo.size(); ++i)
2505 {
2506 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2507 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2508 ++realLostAllocCount;
2509 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002510 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002511 }
2512
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002513 // Destroy all the buffers in forward order.
2514 for(size_t i = 0; i < bufInfo.size(); ++i)
2515 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2516 bufInfo.clear();
2517 }
2518
Adam Sawicki70a683e2018-08-24 15:36:32 +02002519 vmaDestroyPool(g_hAllocator, pool);
2520}
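
// A minimal sketch (not called by the tests) of the setup that TestLinearAllocator() above
// exercises piecewise: a single-block pool with the linear algorithm, which acts as a stack or
// ring buffer depending on the order in which allocations are freed. The sizes are placeholders.
static VmaPool CreateLinearPoolSketch()
{
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 1024 * 1024;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1; // Exactly one block.
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo,
        &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = VK_NULL_HANDLE;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);
    return pool;
}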
Adam Sawickif799c4f2018-08-23 10:40:30 +02002521
Adam Sawicki70a683e2018-08-24 15:36:32 +02002522static void TestLinearAllocatorMultiBlock()
2523{
2524 wprintf(L"Test linear allocator multi block\n");
2525
2526 RandomNumberGenerator rand{345673};
2527
2528 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2529 sampleBufCreateInfo.size = 1024 * 1024;
2530 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2531
2532 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2533 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2534
2535 VmaPoolCreateInfo poolCreateInfo = {};
2536 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2537 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002538 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002539
2540 VmaPool pool = nullptr;
2541 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002542 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002543
2544 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2545
2546 VmaAllocationCreateInfo allocCreateInfo = {};
2547 allocCreateInfo.pool = pool;
2548
2549 std::vector<BufferInfo> bufInfo;
2550 VmaAllocationInfo allocInfo;
2551
2552 // Test one-time free.
2553 {
2554 // Allocate buffers until we move to a second block.
2555 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2556 for(uint32_t i = 0; ; ++i)
2557 {
2558 BufferInfo newBufInfo;
2559 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2560 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002561 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002562 bufInfo.push_back(newBufInfo);
2563 if(lastMem && allocInfo.deviceMemory != lastMem)
2564 {
2565 break;
2566 }
2567 lastMem = allocInfo.deviceMemory;
2568 }
2569
Adam Sawickib8d34d52018-10-03 17:41:20 +02002570 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002571
2572 // Make sure that pool has now two blocks.
2573 VmaPoolStats poolStats = {};
2574 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002575 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002576
2577 // Destroy all the buffers in random order.
2578 while(!bufInfo.empty())
2579 {
2580 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2581 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2582 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2583 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2584 }
2585
2586 // Make sure that pool has now at most one block.
2587 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002588 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002589 }
2590
2591 // Test stack.
2592 {
2593 // Allocate buffers until we move to a second block.
2594 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2595 for(uint32_t i = 0; ; ++i)
2596 {
2597 BufferInfo newBufInfo;
2598 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2599 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002600 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002601 bufInfo.push_back(newBufInfo);
2602 if(lastMem && allocInfo.deviceMemory != lastMem)
2603 {
2604 break;
2605 }
2606 lastMem = allocInfo.deviceMemory;
2607 }
2608
Adam Sawickib8d34d52018-10-03 17:41:20 +02002609 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002610
2611 // Add a few more buffers.
2612 for(uint32_t i = 0; i < 5; ++i)
2613 {
2614 BufferInfo newBufInfo;
2615 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2616 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002617 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002618 bufInfo.push_back(newBufInfo);
2619 }
2620
2621 // Make sure that pool has now two blocks.
2622 VmaPoolStats poolStats = {};
2623 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002624 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002625
2626 // Delete half of the buffers, LIFO.
2627 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2628 {
2629 const BufferInfo& currBufInfo = bufInfo.back();
2630 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2631 bufInfo.pop_back();
2632 }
2633
2634 // Add one more buffer.
2635 BufferInfo newBufInfo;
2636 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2637 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002638 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002639 bufInfo.push_back(newBufInfo);
2640
2641 // Make sure that pool has now one block.
2642 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002643 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002644
2645 // Delete all the remaining buffers, LIFO.
2646 while(!bufInfo.empty())
2647 {
2648 const BufferInfo& currBufInfo = bufInfo.back();
2649 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2650 bufInfo.pop_back();
2651 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002652 }
2653
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002654 vmaDestroyPool(g_hAllocator, pool);
2655}
2656
Adam Sawickifd11d752018-08-22 15:02:10 +02002657static void ManuallyTestLinearAllocator()
2658{
2659 VmaStats origStats;
2660 vmaCalculateStats(g_hAllocator, &origStats);
2661
2662 wprintf(L"Manually test linear allocator\n");
2663
2664 RandomNumberGenerator rand{645332};
2665
2666 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2667 sampleBufCreateInfo.size = 1024; // Whatever.
2668 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2669
2670 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2671 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2672
2673 VmaPoolCreateInfo poolCreateInfo = {};
2674 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002675 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002676
2677 poolCreateInfo.blockSize = 10 * 1024;
2678 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2679 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2680
2681 VmaPool pool = nullptr;
2682 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002683 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002684
2685 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2686
2687 VmaAllocationCreateInfo allocCreateInfo = {};
2688 allocCreateInfo.pool = pool;
2689
2690 std::vector<BufferInfo> bufInfo;
2691 VmaAllocationInfo allocInfo;
2692 BufferInfo newBufInfo;
2693
2694 // Test double stack.
2695 {
2696 /*
2697 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2698 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2699
2700 In total:
2701 1 block allocated
2702 10240 Vulkan bytes
2703 6 new allocations
2704 2256 bytes in allocations
2705 */
2706
2707 bufCreateInfo.size = 32;
2708 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2709 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002710 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002711 bufInfo.push_back(newBufInfo);
2712
2713 bufCreateInfo.size = 1024;
2714 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2715 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002716 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002717 bufInfo.push_back(newBufInfo);
2718
2719 bufCreateInfo.size = 32;
2720 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2721 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002722 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002723 bufInfo.push_back(newBufInfo);
2724
2725 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2726
2727 bufCreateInfo.size = 128;
2728 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2729 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002730 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002731 bufInfo.push_back(newBufInfo);
2732
2733 bufCreateInfo.size = 1024;
2734 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2735 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002737 bufInfo.push_back(newBufInfo);
2738
2739 bufCreateInfo.size = 16;
2740 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2741 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002742 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002743 bufInfo.push_back(newBufInfo);
2744
2745 VmaStats currStats;
2746 vmaCalculateStats(g_hAllocator, &currStats);
2747 VmaPoolStats poolStats;
2748 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2749
2750 char* statsStr = nullptr;
2751 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2752
2753 // PUT BREAKPOINT HERE TO CHECK.
2754 // Inspect: currStats versus origStats, poolStats, statsStr.
2755 int I = 0;
2756
2757 vmaFreeStatsString(g_hAllocator, statsStr);
2758
2759 // Destroy the buffers in reverse order.
2760 while(!bufInfo.empty())
2761 {
2762 const BufferInfo& currBufInfo = bufInfo.back();
2763 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2764 bufInfo.pop_back();
2765 }
2766 }
2767
2768 vmaDestroyPool(g_hAllocator, pool);
2769}
2770
Adam Sawicki80927152018-09-07 17:27:23 +02002771static void BenchmarkAlgorithmsCase(FILE* file,
2772 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002773 bool empty,
2774 VmaAllocationCreateFlags allocStrategy,
2775 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002776{
2777 RandomNumberGenerator rand{16223};
2778
2779 const VkDeviceSize bufSizeMin = 32;
2780 const VkDeviceSize bufSizeMax = 1024;
2781 const size_t maxBufCapacity = 10000;
2782 const uint32_t iterationCount = 10;
2783
2784 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2785 sampleBufCreateInfo.size = bufSizeMax;
2786 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2787
2788 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2789 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2790
2791 VmaPoolCreateInfo poolCreateInfo = {};
2792 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002793 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002794
2795 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002796 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002797 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2798
2799 VmaPool pool = nullptr;
2800 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002801 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002802
2803 // Buffer created just to get memory requirements. Never bound to any memory.
2804 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2805 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002806 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002807
2808 VkMemoryRequirements memReq = {};
2809 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2810
2811 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2812
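    // The benchmark below calls vmaAllocateMemory()/vmaFreeMemory() directly with these
    // requirements (no real buffers are created or bound), so the timings mostly reflect the
    // allocator's own bookkeeping for the chosen algorithm and allocation strategy.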
2813 VmaAllocationCreateInfo allocCreateInfo = {};
2814 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002815 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002816
2817 VmaAllocation alloc;
2818 std::vector<VmaAllocation> baseAllocations;
2819
2820 if(!empty)
2821 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002822 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002823 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002824 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002825 {
2826 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2827 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002828 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002829 baseAllocations.push_back(alloc);
2830 totalSize += memReq.size;
2831 }
2832
2833 // Delete half of them, chosen randomly.
2834 size_t allocsToDelete = baseAllocations.size() / 2;
2835 for(size_t i = 0; i < allocsToDelete; ++i)
2836 {
2837 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2838 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2839 baseAllocations.erase(baseAllocations.begin() + index);
2840 }
2841 }
2842
2843 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002844 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002845 std::vector<VmaAllocation> testAllocations;
2846 testAllocations.reserve(allocCount);
2847 duration allocTotalDuration = duration::zero();
2848 duration freeTotalDuration = duration::zero();
2849 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2850 {
2851 // Allocations
2852 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2853 for(size_t i = 0; i < allocCount; ++i)
2854 {
2855 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2856 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002857 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002858 testAllocations.push_back(alloc);
2859 }
2860 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2861
2862 // Deallocations
2863 switch(freeOrder)
2864 {
2865 case FREE_ORDER::FORWARD:
2866 // Leave testAllocations unchanged.
2867 break;
2868 case FREE_ORDER::BACKWARD:
2869 std::reverse(testAllocations.begin(), testAllocations.end());
2870 break;
2871 case FREE_ORDER::RANDOM:
2872 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2873 break;
2874 default: assert(0);
2875 }
2876
2877 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2878 for(size_t i = 0; i < allocCount; ++i)
2879 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2880 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2881
2882 testAllocations.clear();
2883 }
2884
2885 // Delete baseAllocations
2886 while(!baseAllocations.empty())
2887 {
2888 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2889 baseAllocations.pop_back();
2890 }
2891
2892 vmaDestroyPool(g_hAllocator, pool);
2893
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002894 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2895 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2896
Adam Sawicki80927152018-09-07 17:27:23 +02002897 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2898 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002899 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002900 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002901 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002902 allocTotalSeconds,
2903 freeTotalSeconds);
2904
2905 if(file)
2906 {
2907 std::string currTime;
2908 CurrentTimeToStr(currTime);
2909
Adam Sawicki80927152018-09-07 17:27:23 +02002910 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002911 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002912 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002913 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002914 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002915 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2916 allocTotalSeconds,
2917 freeTotalSeconds);
2918 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002919}
2920
Adam Sawicki80927152018-09-07 17:27:23 +02002921static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002922{
Adam Sawicki80927152018-09-07 17:27:23 +02002923 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002924
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002925 if(file)
2926 {
2927 fprintf(file,
2928 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002929 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002930 "Allocation time (s),Deallocation time (s)\n");
2931 }
2932
Adam Sawicki0a607132018-08-24 11:18:41 +02002933 uint32_t freeOrderCount = 1;
2934 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2935 freeOrderCount = 3;
2936 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2937 freeOrderCount = 2;
2938
2939 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002940 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002941
2942 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2943 {
2944 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2945 switch(freeOrderIndex)
2946 {
2947 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2948 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2949 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2950 default: assert(0);
2951 }
2952
2953 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2954 {
Adam Sawicki80927152018-09-07 17:27:23 +02002955 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002956 {
Adam Sawicki80927152018-09-07 17:27:23 +02002957 uint32_t algorithm = 0;
2958 switch(algorithmIndex)
2959 {
2960 case 0:
2961 break;
2962 case 1:
2963 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2964 break;
2965 case 2:
2966 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2967 break;
2968 default:
2969 assert(0);
2970 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002971
Adam Sawicki80927152018-09-07 17:27:23 +02002972 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002973 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2974 {
2975 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002976 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002977 {
2978 switch(allocStrategyIndex)
2979 {
2980 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2981 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2982 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2983 default: assert(0);
2984 }
2985 }
2986
Adam Sawicki80927152018-09-07 17:27:23 +02002987 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002988 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002989 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002990 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002991 strategy,
2992 freeOrder); // freeOrder
2993 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002994 }
2995 }
2996 }
2997}
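
// Minimal usage sketch for BenchmarkAlgorithms(). The file name and the call site below are
// hypothetical - in this test suite the FILE* is opened by the caller and may be null, in which
// case only the printf summary lines are produced and no CSV rows are written:
//
//     FILE* benchFile = nullptr;
//     fopen_s(&benchFile, "BenchmarkAlgorithms.csv", "w"); // placeholder path
//     BenchmarkAlgorithms(benchFile);
//     if(benchFile != nullptr)
//         fclose(benchFile);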
2998
Adam Sawickib8333fb2018-03-13 16:15:53 +01002999static void TestPool_SameSize()
3000{
3001 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3002 const size_t BUF_COUNT = 100;
3003 VkResult res;
3004
3005 RandomNumberGenerator rand{123};
3006
3007 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3008 bufferInfo.size = BUF_SIZE;
3009 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3010
3011 uint32_t memoryTypeBits = UINT32_MAX;
3012 {
3013 VkBuffer dummyBuffer;
3014 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003015 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003016
3017 VkMemoryRequirements memReq;
3018 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3019 memoryTypeBits = memReq.memoryTypeBits;
3020
3021 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3022 }
3023
3024 VmaAllocationCreateInfo poolAllocInfo = {};
3025 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3026 uint32_t memTypeIndex;
3027 res = vmaFindMemoryTypeIndex(
3028 g_hAllocator,
3029 memoryTypeBits,
3030 &poolAllocInfo,
3031 &memTypeIndex);
3032
3033 VmaPoolCreateInfo poolCreateInfo = {};
3034 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3035 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3036 poolCreateInfo.minBlockCount = 1;
3037 poolCreateInfo.maxBlockCount = 4;
3038 poolCreateInfo.frameInUseCount = 0;
3039
3040 VmaPool pool;
3041 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003042 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003043
3044 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3045
3046 VmaAllocationCreateInfo allocInfo = {};
3047 allocInfo.pool = pool;
3048 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3049 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
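    // With CAN_BECOME_LOST + CAN_MAKE_OTHER_LOST and poolCreateInfo.frameInUseCount == 0,
    // an allocation that was not touched (e.g. via vmaGetAllocationInfo) in the current frame
    // may be made lost when a new allocation cannot find free space - which is what the
    // frame-index juggling below exercises.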
3050
3051 struct BufItem
3052 {
3053 VkBuffer Buf;
3054 VmaAllocation Alloc;
3055 };
3056 std::vector<BufItem> items;
3057
3058 // Fill entire pool.
3059 for(size_t i = 0; i < BUF_COUNT; ++i)
3060 {
3061 BufItem item;
3062 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003063 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003064 items.push_back(item);
3065 }
3066
3067 // Make sure that another allocation would fail.
3068 {
3069 BufItem item;
3070 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003071 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003072 }
3073
3074 // Validate that no buffer is lost. Also check that they are not mapped.
3075 for(size_t i = 0; i < items.size(); ++i)
3076 {
3077 VmaAllocationInfo allocInfo;
3078 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003079 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3080 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003081 }
3082
3083 // Free some percent of random items.
3084 {
3085 const size_t PERCENT_TO_FREE = 10;
3086 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3087 for(size_t i = 0; i < itemsToFree; ++i)
3088 {
3089 size_t index = (size_t)rand.Generate() % items.size();
3090 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3091 items.erase(items.begin() + index);
3092 }
3093 }
3094
3095 // Randomly allocate and free items.
3096 {
3097 const size_t OPERATION_COUNT = BUF_COUNT;
3098 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3099 {
3100 bool allocate = rand.Generate() % 2 != 0;
3101 if(allocate)
3102 {
3103 if(items.size() < BUF_COUNT)
3104 {
3105 BufItem item;
3106 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003107 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003108 items.push_back(item);
3109 }
3110 }
3111 else // Free
3112 {
3113 if(!items.empty())
3114 {
3115 size_t index = (size_t)rand.Generate() % items.size();
3116 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3117 items.erase(items.begin() + index);
3118 }
3119 }
3120 }
3121 }
3122
3123 // Allocate up to maximum.
3124 while(items.size() < BUF_COUNT)
3125 {
3126 BufItem item;
3127 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003128 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003129 items.push_back(item);
3130 }
3131
3132 // Validate that no buffer is lost.
3133 for(size_t i = 0; i < items.size(); ++i)
3134 {
3135 VmaAllocationInfo allocInfo;
3136 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003137 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003138 }
3139
3140 // Next frame.
3141 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3142
3143 // Allocate another BUF_COUNT buffers.
3144 for(size_t i = 0; i < BUF_COUNT; ++i)
3145 {
3146 BufItem item;
3147 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003148 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003149 items.push_back(item);
3150 }
3151
3152    // Make sure the first BUF_COUNT buffers are lost. Delete them.
3153 for(size_t i = 0; i < BUF_COUNT; ++i)
3154 {
3155 VmaAllocationInfo allocInfo;
3156 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003157 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003158 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3159 }
3160 items.erase(items.begin(), items.begin() + BUF_COUNT);
3161
3162 // Validate that no buffer is lost.
3163 for(size_t i = 0; i < items.size(); ++i)
3164 {
3165 VmaAllocationInfo allocInfo;
3166 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003167 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003168 }
3169
3170 // Free one item.
3171 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3172 items.pop_back();
3173
3174 // Validate statistics.
3175 {
3176 VmaPoolStats poolStats = {};
3177 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003178 TEST(poolStats.allocationCount == items.size());
3179        TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3180 TEST(poolStats.unusedRangeCount == 1);
3181 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3182 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003183 }
3184
3185 // Free all remaining items.
3186 for(size_t i = items.size(); i--; )
3187 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3188 items.clear();
3189
3190 // Allocate maximum items again.
3191 for(size_t i = 0; i < BUF_COUNT; ++i)
3192 {
3193 BufItem item;
3194 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003195 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003196 items.push_back(item);
3197 }
3198
3199 // Delete every other item.
3200 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3201 {
3202 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3203 items.erase(items.begin() + i);
3204 }
3205
3206 // Defragment!
3207 {
3208 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3209 for(size_t i = 0; i < items.size(); ++i)
3210 allocationsToDefragment[i] = items[i].Alloc;
3211
3212 VmaDefragmentationStats defragmentationStats;
3213 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003214 TEST(res == VK_SUCCESS);
3215 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003216 }
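
    // Note: per the VMA documentation for vmaDefragment, allocations can be moved to a different
    // VkDeviceMemory block, so a real application would have to recreate and rebind any buffers
    // bound to moved allocations before using them again. This test can skip that step because
    // the buffers are never used by the GPU and are destroyed right below.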
3217
3218 // Free all remaining items.
3219 for(size_t i = items.size(); i--; )
3220 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3221 items.clear();
3222
3223 ////////////////////////////////////////////////////////////////////////////////
3224 // Test for vmaMakePoolAllocationsLost
3225
3226 // Allocate 4 buffers on frame 10.
3227 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3228 for(size_t i = 0; i < 4; ++i)
3229 {
3230 BufItem item;
3231 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003232 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003233 items.push_back(item);
3234 }
3235
3236 // Touch first 2 of them on frame 11.
3237 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3238 for(size_t i = 0; i < 2; ++i)
3239 {
3240 VmaAllocationInfo allocInfo;
3241 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3242 }
3243
3244    // vmaMakePoolAllocationsLost. Only the 2 allocations not touched on frame 11 should be lost.
3245 size_t lostCount = 0xDEADC0DE;
3246 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003247 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003248
3249 // Make another call. Now 0 should be lost.
3250 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003251 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003252
3253 // Make another call, with null count. Should not crash.
3254 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3255
3256 // END: Free all remaining items.
3257 for(size_t i = items.size(); i--; )
3258 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3259
3260 items.clear();
3261
Adam Sawickid2924172018-06-11 12:48:46 +02003262 ////////////////////////////////////////////////////////////////////////////////
3263 // Test for allocation too large for pool
3264
3265 {
3266 VmaAllocationCreateInfo allocCreateInfo = {};
3267 allocCreateInfo.pool = pool;
3268
3269 VkMemoryRequirements memReq;
3270 memReq.memoryTypeBits = UINT32_MAX;
3271 memReq.alignment = 1;
3272 memReq.size = poolCreateInfo.blockSize + 4;
3273
3274 VmaAllocation alloc = nullptr;
3275 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003276 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003277 }
3278
Adam Sawickib8333fb2018-03-13 16:15:53 +01003279 vmaDestroyPool(g_hAllocator, pool);
3280}
3281
Adam Sawickib0c36362018-11-13 16:17:38 +01003282static void TestResize()
3283{
3284 wprintf(L"Testing vmaResizeAllocation...\n");
3285
3286 const VkDeviceSize KILOBYTE = 1024ull;
3287 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3288
3289 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3290 bufCreateInfo.size = 2 * MEGABYTE;
3291 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3292
3293 VmaAllocationCreateInfo allocCreateInfo = {};
3294 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3295
3296 uint32_t memTypeIndex = UINT32_MAX;
3297 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3298
3299 VmaPoolCreateInfo poolCreateInfo = {};
3300 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3301 poolCreateInfo.blockSize = 8 * MEGABYTE;
3302 poolCreateInfo.minBlockCount = 1;
3303 poolCreateInfo.maxBlockCount = 1;
3304 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3305
3306 VmaPool pool;
3307 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3308
3309 allocCreateInfo.pool = pool;
3310
3311 // Fill 8 MB pool with 4 * 2 MB allocations.
3312 VmaAllocation allocs[4] = {};
3313
3314 VkMemoryRequirements memReq = {};
3315 memReq.memoryTypeBits = UINT32_MAX;
3316 memReq.alignment = 4;
3317 memReq.size = bufCreateInfo.size;
3318
3319 VmaAllocationInfo allocInfo = {};
3320
3321 for(uint32_t i = 0; i < 4; ++i)
3322 {
3323 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3324 }
3325
3326 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3327
3328 // Case: Resize to the same size always succeeds.
3329 {
3330 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3331        vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3332 TEST(allocInfo.size == 2ull * 1024 * 1024);
3333 }
3334
3335 // Case: Shrink allocation at the end.
3336 {
3337 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3338 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3339 TEST(allocInfo.size == 1ull * 1024 * 1024);
3340 }
3341
3342 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3343
3344 // Case: Shrink allocation before free space.
3345 {
3346 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3347 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3348 TEST(allocInfo.size == 512 * KILOBYTE);
3349 }
3350
3351 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3352
3353 // Case: Shrink allocation before next allocation.
3354 {
3355 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3356 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3357 TEST(allocInfo.size == 1 * MEGABYTE);
3358 }
3359
3360 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3361
3362 // Case: Grow allocation while there is even more space available.
3363 {
3364 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3365 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3366 TEST(allocInfo.size == 1 * MEGABYTE);
3367 }
3368
3369 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3370
3371 // Case: Grow allocation while there is exact amount of free space available.
3372 {
3373 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3374 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3375 TEST(allocInfo.size == 2 * MEGABYTE);
3376 }
3377
3378 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3379
3380 // Case: Fail to grow when there is not enough free space due to next allocation.
3381 {
3382 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3383 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3384 TEST(allocInfo.size == 2 * MEGABYTE);
3385 }
3386
3387 // Case: Fail to grow when there is not enough free space due to end of memory block.
3388 {
3389 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3390 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3391 TEST(allocInfo.size == 1 * MEGABYTE);
3392 }
3393
3394 for(uint32_t i = 4; i--; )
3395 {
3396 vmaFreeMemory(g_hAllocator, allocs[i]);
3397 }
3398
3399 vmaDestroyPool(g_hAllocator, pool);
3400
3401 // Test dedicated allocation
3402 {
3403 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3404 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3405 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3406
3407 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3408 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3409
3410 // Case: Resize to the same size always succeeds.
3411 {
3412 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3413 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3414 TEST(allocInfo.size == 2ull * 1024 * 1024);
3415 }
3416
3417 // Case: Shrinking fails.
3418 {
3419 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3420 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3421 TEST(allocInfo.size == 2ull * 1024 * 1024);
3422 }
3423
3424 // Case: Growing fails.
3425 {
3426 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3427 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3428 TEST(allocInfo.size == 2ull * 1024 * 1024);
3429 }
3430
3431 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3432 }
3433}
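
// Summary of the vmaResizeAllocation behavior exercised above (as checked by the assertions,
// not a full statement of the API contract): an allocation never moves; within a block,
// shrinking succeeds and growing succeeds only into free space directly following the
// allocation; a dedicated allocation can only be "resized" to its current size.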
3434
Adam Sawickie44c6262018-06-15 14:30:39 +02003435static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3436{
3437 const uint8_t* pBytes = (const uint8_t*)pMemory;
3438 for(size_t i = 0; i < size; ++i)
3439 {
3440 if(pBytes[i] != pattern)
3441 {
3442 return false;
3443 }
3444 }
3445 return true;
3446}
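
// ValidatePattern() is used by TestAllocationsInitialization() below to check the fill values
// 0xDC (content of a newly created allocation) and 0xEF (content after the allocation is freed).
// This presumably requires building with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled so that the
// allocator actually writes these patterns; otherwise the checks would read arbitrary memory.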
3447
3448static void TestAllocationsInitialization()
3449{
3450 VkResult res;
3451
3452 const size_t BUF_SIZE = 1024;
3453
3454 // Create pool.
3455
3456 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3457 bufInfo.size = BUF_SIZE;
3458 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3459
3460 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3461 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3462
3463 VmaPoolCreateInfo poolCreateInfo = {};
3464 poolCreateInfo.blockSize = BUF_SIZE * 10;
3465 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3466 poolCreateInfo.maxBlockCount = 1;
3467 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003468 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003469
3470 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3471 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003472 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003473
3474 // Create one persistently mapped buffer to keep memory of this block mapped,
3475 // so that pointer to mapped data will remain (more or less...) valid even
3476 // after destruction of other allocations.
3477
3478 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3479 VkBuffer firstBuf;
3480 VmaAllocation firstAlloc;
3481 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003482 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003483
3484 // Test buffers.
3485
3486 for(uint32_t i = 0; i < 2; ++i)
3487 {
3488 const bool persistentlyMapped = i == 0;
3489 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3490 VkBuffer buf;
3491 VmaAllocation alloc;
3492 VmaAllocationInfo allocInfo;
3493 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003494 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003495
3496 void* pMappedData;
3497 if(!persistentlyMapped)
3498 {
3499 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003500 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003501 }
3502 else
3503 {
3504 pMappedData = allocInfo.pMappedData;
3505 }
3506
3507 // Validate initialized content
3508 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003509 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003510
3511 if(!persistentlyMapped)
3512 {
3513 vmaUnmapMemory(g_hAllocator, alloc);
3514 }
3515
3516 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3517
3518 // Validate freed content
3519 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003520 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003521 }
3522
3523 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3524 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3525}
3526
Adam Sawickib8333fb2018-03-13 16:15:53 +01003527static void TestPool_Benchmark(
3528 PoolTestResult& outResult,
3529 const PoolTestConfig& config)
3530{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003531 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003532
3533 RandomNumberGenerator mainRand{config.RandSeed};
3534
3535 uint32_t allocationSizeProbabilitySum = std::accumulate(
3536 config.AllocationSizes.begin(),
3537 config.AllocationSizes.end(),
3538 0u,
3539 [](uint32_t sum, const AllocationSize& allocSize) {
3540 return sum + allocSize.Probability;
3541 });
3542
3543 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3544 bufferInfo.size = 256; // Whatever.
3545 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3546
3547 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3548 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3549 imageInfo.extent.width = 256; // Whatever.
3550 imageInfo.extent.height = 256; // Whatever.
3551 imageInfo.extent.depth = 1;
3552 imageInfo.mipLevels = 1;
3553 imageInfo.arrayLayers = 1;
3554 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3555 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3556 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3557 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3558 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3559
3560 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3561 {
3562 VkBuffer dummyBuffer;
3563 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003564 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003565
3566 VkMemoryRequirements memReq;
3567 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3568 bufferMemoryTypeBits = memReq.memoryTypeBits;
3569
3570 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3571 }
3572
3573 uint32_t imageMemoryTypeBits = UINT32_MAX;
3574 {
3575 VkImage dummyImage;
3576 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003577 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003578
3579 VkMemoryRequirements memReq;
3580 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3581 imageMemoryTypeBits = memReq.memoryTypeBits;
3582
3583 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3584 }
3585
3586 uint32_t memoryTypeBits = 0;
3587 if(config.UsesBuffers() && config.UsesImages())
3588 {
3589 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3590 if(memoryTypeBits == 0)
3591 {
3592 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3593 return;
3594 }
3595 }
3596 else if(config.UsesBuffers())
3597 memoryTypeBits = bufferMemoryTypeBits;
3598 else if(config.UsesImages())
3599 memoryTypeBits = imageMemoryTypeBits;
3600 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003601 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003602
3603 VmaPoolCreateInfo poolCreateInfo = {};
3604 poolCreateInfo.memoryTypeIndex = 0;
3605 poolCreateInfo.minBlockCount = 1;
3606 poolCreateInfo.maxBlockCount = 1;
3607 poolCreateInfo.blockSize = config.PoolSize;
3608 poolCreateInfo.frameInUseCount = 1;
3609
3610 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3611 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3612 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3613
3614 VmaPool pool;
3615 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003616 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003617
3618 // Start time measurement - after creating pool and initializing data structures.
3619 time_point timeBeg = std::chrono::high_resolution_clock::now();
3620
3621 ////////////////////////////////////////////////////////////////////////////////
3622 // ThreadProc
3623 auto ThreadProc = [&](
3624 PoolTestThreadResult* outThreadResult,
3625 uint32_t randSeed,
3626 HANDLE frameStartEvent,
3627 HANDLE frameEndEvent) -> void
3628 {
3629 RandomNumberGenerator threadRand{randSeed};
3630
3631 outThreadResult->AllocationTimeMin = duration::max();
3632 outThreadResult->AllocationTimeSum = duration::zero();
3633 outThreadResult->AllocationTimeMax = duration::min();
3634 outThreadResult->DeallocationTimeMin = duration::max();
3635 outThreadResult->DeallocationTimeSum = duration::zero();
3636 outThreadResult->DeallocationTimeMax = duration::min();
3637 outThreadResult->AllocationCount = 0;
3638 outThreadResult->DeallocationCount = 0;
3639 outThreadResult->LostAllocationCount = 0;
3640 outThreadResult->LostAllocationTotalSize = 0;
3641 outThreadResult->FailedAllocationCount = 0;
3642 outThreadResult->FailedAllocationTotalSize = 0;
3643
3644 struct Item
3645 {
3646 VkDeviceSize BufferSize;
3647 VkExtent2D ImageSize;
3648 VkBuffer Buf;
3649 VkImage Image;
3650 VmaAllocation Alloc;
3651
3652 VkDeviceSize CalcSizeBytes() const
3653 {
3654 return BufferSize +
3655 ImageSize.width * ImageSize.height * 4;
3656 }
3657 };
3658 std::vector<Item> unusedItems, usedItems;
3659
3660 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3661
3662 // Create all items - all unused, not yet allocated.
3663 for(size_t i = 0; i < threadTotalItemCount; ++i)
3664 {
3665 Item item = {};
3666
3667 uint32_t allocSizeIndex = 0;
3668 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3669 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3670 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3671
3672 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3673 if(allocSize.BufferSizeMax > 0)
3674 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003675 TEST(allocSize.BufferSizeMin > 0);
3676 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003677 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3678 item.BufferSize = allocSize.BufferSizeMin;
3679 else
3680 {
3681 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3682 item.BufferSize = item.BufferSize / 16 * 16;
3683 }
3684 }
3685 else
3686 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003687 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003688 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3689 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3690 else
3691 {
3692 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3693 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3694 }
3695 }
3696
3697 unusedItems.push_back(item);
3698 }
3699
3700 auto Allocate = [&](Item& item) -> VkResult
3701 {
3702 VmaAllocationCreateInfo allocCreateInfo = {};
3703 allocCreateInfo.pool = pool;
3704 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3705 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3706
3707 if(item.BufferSize)
3708 {
3709 bufferInfo.size = item.BufferSize;
3710 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3711 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3712 }
3713 else
3714 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003715 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003716
3717 imageInfo.extent.width = item.ImageSize.width;
3718 imageInfo.extent.height = item.ImageSize.height;
3719 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3720 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3721 }
3722 };
3723
3724 ////////////////////////////////////////////////////////////////////////////////
3725 // Frames
3726 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3727 {
3728 WaitForSingleObject(frameStartEvent, INFINITE);
3729
3730 // Always make some percent of used bufs unused, to choose different used ones.
3731 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3732 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3733 {
3734 size_t index = threadRand.Generate() % usedItems.size();
3735 unusedItems.push_back(usedItems[index]);
3736 usedItems.erase(usedItems.begin() + index);
3737 }
3738
3739 // Determine which bufs we want to use in this frame.
3740 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3741 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003742 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003743 // Move some used to unused.
3744 while(usedBufCount < usedItems.size())
3745 {
3746 size_t index = threadRand.Generate() % usedItems.size();
3747 unusedItems.push_back(usedItems[index]);
3748 usedItems.erase(usedItems.begin() + index);
3749 }
3750 // Move some unused to used.
3751 while(usedBufCount > usedItems.size())
3752 {
3753 size_t index = threadRand.Generate() % unusedItems.size();
3754 usedItems.push_back(unusedItems[index]);
3755 unusedItems.erase(unusedItems.begin() + index);
3756 }
3757
3758 uint32_t touchExistingCount = 0;
3759 uint32_t touchLostCount = 0;
3760 uint32_t createSucceededCount = 0;
3761 uint32_t createFailedCount = 0;
3762
3763            // Touch all used bufs. Allocate the ones that are not yet created or have become lost.
3764 for(size_t i = 0; i < usedItems.size(); ++i)
3765 {
3766 Item& item = usedItems[i];
3767 // Not yet created.
3768 if(item.Alloc == VK_NULL_HANDLE)
3769 {
3770 res = Allocate(item);
3771 ++outThreadResult->AllocationCount;
3772 if(res != VK_SUCCESS)
3773 {
3774 item.Alloc = VK_NULL_HANDLE;
3775 item.Buf = VK_NULL_HANDLE;
3776 ++outThreadResult->FailedAllocationCount;
3777 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3778 ++createFailedCount;
3779 }
3780 else
3781 ++createSucceededCount;
3782 }
3783 else
3784 {
3785 // Touch.
3786 VmaAllocationInfo allocInfo;
3787 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3788 // Lost.
3789 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3790 {
3791 ++touchLostCount;
3792
3793 // Destroy.
3794 {
3795 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3796 if(item.Buf)
3797 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3798 else
3799 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3800 ++outThreadResult->DeallocationCount;
3801 }
3802 item.Alloc = VK_NULL_HANDLE;
3803 item.Buf = VK_NULL_HANDLE;
3804
3805 ++outThreadResult->LostAllocationCount;
3806 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3807
3808 // Recreate.
3809 res = Allocate(item);
3810 ++outThreadResult->AllocationCount;
3811 // Creation failed.
3812 if(res != VK_SUCCESS)
3813 {
3814 ++outThreadResult->FailedAllocationCount;
3815 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3816 ++createFailedCount;
3817 }
3818 else
3819 ++createSucceededCount;
3820 }
3821 else
3822 ++touchExistingCount;
3823 }
3824 }
3825
3826 /*
3827 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3828 randSeed, frameIndex,
3829 touchExistingCount, touchLostCount,
3830 createSucceededCount, createFailedCount);
3831 */
3832
3833 SetEvent(frameEndEvent);
3834 }
3835
3836 // Free all remaining items.
3837 for(size_t i = usedItems.size(); i--; )
3838 {
3839 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3840 if(usedItems[i].Buf)
3841 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3842 else
3843 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3844 ++outThreadResult->DeallocationCount;
3845 }
3846 for(size_t i = unusedItems.size(); i--; )
3847 {
3848            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3849 if(unusedItems[i].Buf)
3850 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3851 else
3852 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3853 ++outThreadResult->DeallocationCount;
3854 }
3855 };
3856
3857 // Launch threads.
3858 uint32_t threadRandSeed = mainRand.Generate();
3859 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3860 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3861 std::vector<std::thread> bkgThreads;
3862 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3863 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3864 {
3865 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3866 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3867 bkgThreads.emplace_back(std::bind(
3868 ThreadProc,
3869 &threadResults[threadIndex],
3870 threadRandSeed + threadIndex,
3871 frameStartEvents[threadIndex],
3872 frameEndEvents[threadIndex]));
3873 }
3874
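    // Frame handshake: each iteration below bumps the allocator's frame index, signals every
    // worker's frameStartEvent, then blocks until all workers have signaled frameEndEvent,
    // so all threads process the same frame index before the next frame begins.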
3875 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003876 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003877 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3878 {
3879 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3880 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3881 SetEvent(frameStartEvents[threadIndex]);
3882 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3883 }
3884
3885    // Wait for threads to finish.
3886 for(size_t i = 0; i < bkgThreads.size(); ++i)
3887 {
3888 bkgThreads[i].join();
3889 CloseHandle(frameEndEvents[i]);
3890 CloseHandle(frameStartEvents[i]);
3891 }
3892 bkgThreads.clear();
3893
3894 // Finish time measurement - before destroying pool.
3895 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3896
3897 vmaDestroyPool(g_hAllocator, pool);
3898
3899 outResult.AllocationTimeMin = duration::max();
3900 outResult.AllocationTimeAvg = duration::zero();
3901 outResult.AllocationTimeMax = duration::min();
3902 outResult.DeallocationTimeMin = duration::max();
3903 outResult.DeallocationTimeAvg = duration::zero();
3904 outResult.DeallocationTimeMax = duration::min();
3905 outResult.LostAllocationCount = 0;
3906 outResult.LostAllocationTotalSize = 0;
3907 outResult.FailedAllocationCount = 0;
3908 outResult.FailedAllocationTotalSize = 0;
3909 size_t allocationCount = 0;
3910 size_t deallocationCount = 0;
3911 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3912 {
3913 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3914 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3915 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3916 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3917 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3918 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3919 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3920 allocationCount += threadResult.AllocationCount;
3921 deallocationCount += threadResult.DeallocationCount;
3922 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3923 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3924 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3925 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3926 }
3927 if(allocationCount)
3928 outResult.AllocationTimeAvg /= allocationCount;
3929 if(deallocationCount)
3930 outResult.DeallocationTimeAvg /= deallocationCount;
3931}
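
// Aggregation note for TestPool_Benchmark() above: AllocationTimeAvg/DeallocationTimeAvg are
// global averages (sum of all per-thread time sums divided by the total operation count),
// while the Min/Max values are taken across threads. TotalTime includes thread startup and
// the per-frame handshakes, but not pool creation or destruction.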
3932
3933static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3934{
3935 if(ptr1 < ptr2)
3936 return ptr1 + size1 > ptr2;
3937 else if(ptr2 < ptr1)
3938 return ptr2 + size2 > ptr1;
3939 else
3940 return true;
3941}
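
// Example: for a char* p, MemoryRegionsOverlap(p, 16, p + 8, 16) is true (bytes p+8..p+15 are
// shared), while MemoryRegionsOverlap(p, 8, p + 8, 16) is false (the ranges only touch).
// TestMapping() below uses it to assert that two separately mapped buffers never alias.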
3942
3943static void TestMapping()
3944{
3945 wprintf(L"Testing mapping...\n");
3946
3947 VkResult res;
3948 uint32_t memTypeIndex = UINT32_MAX;
3949
3950 enum TEST
3951 {
3952 TEST_NORMAL,
3953 TEST_POOL,
3954 TEST_DEDICATED,
3955 TEST_COUNT
3956 };
3957 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3958 {
3959 VmaPool pool = nullptr;
3960 if(testIndex == TEST_POOL)
3961 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003962 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003963 VmaPoolCreateInfo poolInfo = {};
3964 poolInfo.memoryTypeIndex = memTypeIndex;
3965 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003966 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003967 }
3968
3969 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3970 bufInfo.size = 0x10000;
3971 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3972
3973 VmaAllocationCreateInfo allocCreateInfo = {};
3974 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3975 allocCreateInfo.pool = pool;
3976 if(testIndex == TEST_DEDICATED)
3977 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3978
3979 VmaAllocationInfo allocInfo;
3980
3981 // Mapped manually
3982
3983 // Create 2 buffers.
3984 BufferInfo bufferInfos[3];
3985 for(size_t i = 0; i < 2; ++i)
3986 {
3987 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3988 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003989 TEST(res == VK_SUCCESS);
3990 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003991 memTypeIndex = allocInfo.memoryType;
3992 }
3993
3994 // Map buffer 0.
3995 char* data00 = nullptr;
3996 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003997 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003998 data00[0xFFFF] = data00[0];
3999
4000 // Map buffer 0 second time.
4001 char* data01 = nullptr;
4002 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004003 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004004
4005 // Map buffer 1.
4006 char* data1 = nullptr;
4007 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004008 TEST(res == VK_SUCCESS && data1 != nullptr);
4009 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004010 data1[0xFFFF] = data1[0];
4011
4012 // Unmap buffer 0 two times.
4013 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4014 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4015 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004016 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004017
4018 // Unmap buffer 1.
4019 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4020 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004021 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004022
4023 // Create 3rd buffer - persistently mapped.
4024 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4025 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4026 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004027 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004028
4029 // Map buffer 2.
4030 char* data2 = nullptr;
4031 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004032 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004033 data2[0xFFFF] = data2[0];
4034
4035 // Unmap buffer 2.
4036 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4037 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004038 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004039
4040 // Destroy all buffers.
4041 for(size_t i = 3; i--; )
4042 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4043
4044 vmaDestroyPool(g_hAllocator, pool);
4045 }
4046}
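
// Note on the test above: vmaMapMemory()/vmaUnmapMemory() are reference-counted per allocation,
// which is why buffer 0 can be mapped twice (both calls return the same pointer) and must be
// unmapped twice, and why buffer 2, created with VMA_ALLOCATION_CREATE_MAPPED_BIT, still
// reports a valid pMappedData after its explicit map/unmap pair.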
4047
4048static void TestMappingMultithreaded()
4049{
4050 wprintf(L"Testing mapping multithreaded...\n");
4051
4052 static const uint32_t threadCount = 16;
4053 static const uint32_t bufferCount = 1024;
4054 static const uint32_t threadBufferCount = bufferCount / threadCount;
4055
4056 VkResult res;
4057 volatile uint32_t memTypeIndex = UINT32_MAX;
4058
4059 enum TEST
4060 {
4061 TEST_NORMAL,
4062 TEST_POOL,
4063 TEST_DEDICATED,
4064 TEST_COUNT
4065 };
4066 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4067 {
4068 VmaPool pool = nullptr;
4069 if(testIndex == TEST_POOL)
4070 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004071 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004072 VmaPoolCreateInfo poolInfo = {};
4073 poolInfo.memoryTypeIndex = memTypeIndex;
4074 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004075 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004076 }
4077
4078 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4079 bufCreateInfo.size = 0x10000;
4080 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4081
4082 VmaAllocationCreateInfo allocCreateInfo = {};
4083 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4084 allocCreateInfo.pool = pool;
4085 if(testIndex == TEST_DEDICATED)
4086 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4087
4088 std::thread threads[threadCount];
4089 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4090 {
4091 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4092 // ======== THREAD FUNCTION ========
4093
4094 RandomNumberGenerator rand{threadIndex};
4095
4096 enum class MODE
4097 {
4098 // Don't map this buffer at all.
4099 DONT_MAP,
4100 // Map and quickly unmap.
4101 MAP_FOR_MOMENT,
4102 // Map and unmap before destruction.
4103 MAP_FOR_LONGER,
4104 // Map two times. Quickly unmap, second unmap before destruction.
4105 MAP_TWO_TIMES,
4106 // Create this buffer as persistently mapped.
4107 PERSISTENTLY_MAPPED,
4108 COUNT
4109 };
4110 std::vector<BufferInfo> bufInfos{threadBufferCount};
4111 std::vector<MODE> bufModes{threadBufferCount};
4112
4113 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4114 {
4115 BufferInfo& bufInfo = bufInfos[bufferIndex];
4116 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4117 bufModes[bufferIndex] = mode;
4118
4119 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4120 if(mode == MODE::PERSISTENTLY_MAPPED)
4121 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4122
4123 VmaAllocationInfo allocInfo;
4124 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4125 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004126 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004127
4128 if(memTypeIndex == UINT32_MAX)
4129 memTypeIndex = allocInfo.memoryType;
4130
4131 char* data = nullptr;
4132
4133 if(mode == MODE::PERSISTENTLY_MAPPED)
4134 {
4135 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004136 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004137 }
4138 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4139 mode == MODE::MAP_TWO_TIMES)
4140 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004141 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004142 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004143 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004144
4145 if(mode == MODE::MAP_TWO_TIMES)
4146 {
4147 char* data2 = nullptr;
4148 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004149 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004150 }
4151 }
4152 else if(mode == MODE::DONT_MAP)
4153 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004154 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004155 }
4156 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004157 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004158
4159                    // Test that reading and writing at the beginning and end of mapped memory don't crash.
4160 if(data)
4161 data[0xFFFF] = data[0];
4162
4163 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4164 {
4165 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4166
4167 VmaAllocationInfo allocInfo;
4168 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4169 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004170 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004171 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004172 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004173 }
4174
4175 switch(rand.Generate() % 3)
4176 {
4177 case 0: Sleep(0); break; // Yield.
4178 case 1: Sleep(10); break; // 10 ms
4179 // default: No sleep.
4180 }
4181
4182                    // Test that reading and writing at the beginning and end of mapped memory don't crash.
4183 if(data)
4184 data[0xFFFF] = data[0];
4185 }
4186
4187 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4188 {
4189 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4190 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4191 {
4192 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4193
4194 VmaAllocationInfo allocInfo;
4195 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004196 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004197 }
4198
4199 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4200 }
4201 });
4202 }
4203
4204 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4205 threads[threadIndex].join();
4206
4207 vmaDestroyPool(g_hAllocator, pool);
4208 }
4209}
4210
4211static void WriteMainTestResultHeader(FILE* file)
4212{
4213 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004214 "Code,Time,"
4215 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004216 "Total Time (us),"
4217 "Allocation Time Min (us),"
4218 "Allocation Time Avg (us),"
4219 "Allocation Time Max (us),"
4220 "Deallocation Time Min (us),"
4221 "Deallocation Time Avg (us),"
4222 "Deallocation Time Max (us),"
4223 "Total Memory Allocated (B),"
4224 "Free Range Size Avg (B),"
4225 "Free Range Size Max (B)\n");
4226}
4227
4228static void WriteMainTestResult(
4229 FILE* file,
4230 const char* codeDescription,
4231 const char* testDescription,
4232 const Config& config, const Result& result)
4233{
4234 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4235 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4236 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4237 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4238 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4239 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4240 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4241
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004242 std::string currTime;
4243 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004244
4245 fprintf(file,
4246 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004247 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4248 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004249 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004250 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004251 totalTimeSeconds * 1e6f,
4252 allocationTimeMinSeconds * 1e6f,
4253 allocationTimeAvgSeconds * 1e6f,
4254 allocationTimeMaxSeconds * 1e6f,
4255 deallocationTimeMinSeconds * 1e6f,
4256 deallocationTimeAvgSeconds * 1e6f,
4257 deallocationTimeMaxSeconds * 1e6f,
4258 result.TotalMemoryAllocated,
4259 result.FreeRangeSizeAvg,
4260 result.FreeRangeSizeMax);
4261}
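
// The row written above has to match WriteMainTestResultHeader(): Code, Time, then the
// comma-separated test description built by PerformMainTests() (which fills the
// Threads/Buffers and images/Sizes/... columns), followed by the timing columns in
// microseconds and the three byte counts. %I64u is the MSVC-specific length modifier for
// 64-bit unsigned values, which is acceptable here since this test code targets Windows
// (see the Win32 events and Sleep() calls above).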
4262
4263static void WritePoolTestResultHeader(FILE* file)
4264{
4265 fprintf(file,
4266 "Code,Test,Time,"
4267 "Config,"
4268 "Total Time (us),"
4269 "Allocation Time Min (us),"
4270 "Allocation Time Avg (us),"
4271 "Allocation Time Max (us),"
4272 "Deallocation Time Min (us),"
4273 "Deallocation Time Avg (us),"
4274 "Deallocation Time Max (us),"
4275 "Lost Allocation Count,"
4276 "Lost Allocation Total Size (B),"
4277 "Failed Allocation Count,"
4278 "Failed Allocation Total Size (B)\n");
4279}
4280
4281static void WritePoolTestResult(
4282 FILE* file,
4283 const char* codeDescription,
4284 const char* testDescription,
4285 const PoolTestConfig& config,
4286 const PoolTestResult& result)
4287{
4288 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4289 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4290 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4291 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4292 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4293 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4294 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4295
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004296 std::string currTime;
4297 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004298
4299 fprintf(file,
4300 "%s,%s,%s,"
4301 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4302 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4303 // General
4304 codeDescription,
4305 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004306 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004307 // Config
4308 config.ThreadCount,
4309 (unsigned long long)config.PoolSize,
4310 config.FrameCount,
4311 config.TotalItemCount,
4312 config.UsedItemCountMin,
4313 config.UsedItemCountMax,
4314 config.ItemsToMakeUnusedPercent,
4315 // Results
4316 totalTimeSeconds * 1e6f,
4317 allocationTimeMinSeconds * 1e6f,
4318 allocationTimeAvgSeconds * 1e6f,
4319 allocationTimeMaxSeconds * 1e6f,
4320 deallocationTimeMinSeconds * 1e6f,
4321 deallocationTimeAvgSeconds * 1e6f,
4322 deallocationTimeMaxSeconds * 1e6f,
4323 result.LostAllocationCount,
4324 result.LostAllocationTotalSize,
4325 result.FailedAllocationCount,
4326 result.FailedAllocationTotalSize);
4327}
4328
4329static void PerformCustomMainTest(FILE* file)
4330{
4331 Config config{};
4332 config.RandSeed = 65735476;
4333 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4334 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4335 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4336 config.FreeOrder = FREE_ORDER::FORWARD;
4337 config.ThreadCount = 16;
4338 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004339 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004340
4341 // Buffers
4342 //config.AllocationSizes.push_back({4, 16, 1024});
4343 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4344
4345 // Images
4346 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4347 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4348
4349 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4350 config.AdditionalOperationCount = 1024;
4351
4352 Result result{};
4353 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004354 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004355 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4356}
4357
4358static void PerformCustomPoolTest(FILE* file)
4359{
4360 PoolTestConfig config;
4361 config.PoolSize = 100 * 1024 * 1024;
4362 config.RandSeed = 2345764;
4363 config.ThreadCount = 1;
4364 config.FrameCount = 200;
4365 config.ItemsToMakeUnusedPercent = 2;
4366
4367 AllocationSize allocSize = {};
4368 allocSize.BufferSizeMin = 1024;
4369 allocSize.BufferSizeMax = 1024 * 1024;
4370 allocSize.Probability = 1;
4371 config.AllocationSizes.push_back(allocSize);
4372
4373 allocSize.BufferSizeMin = 0;
4374 allocSize.BufferSizeMax = 0;
4375 allocSize.ImageSizeMin = 128;
4376 allocSize.ImageSizeMax = 1024;
4377 allocSize.Probability = 1;
4378 config.AllocationSizes.push_back(allocSize);
4379
4380 config.PoolSize = config.CalcAvgResourceSize() * 200;
4381 config.UsedItemCountMax = 160;
4382 config.TotalItemCount = config.UsedItemCountMax * 10;
4383 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4384
4385 g_MemoryAliasingWarningEnabled = false;
4386 PoolTestResult result = {};
4387 TestPool_Benchmark(result, config);
4388 g_MemoryAliasingWarningEnabled = true;
4389
4390 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4391}
4392
Adam Sawickib8333fb2018-03-13 16:15:53 +01004393static void PerformMainTests(FILE* file)
4394{
4395 uint32_t repeatCount = 1;
4396 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4397
4398 Config config{};
4399 config.RandSeed = 65735476;
4400 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4401 config.FreeOrder = FREE_ORDER::FORWARD;
4402
4403 size_t threadCountCount = 1;
4404 switch(ConfigType)
4405 {
4406 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4407 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4408 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4409 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4410 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4411 default: assert(0);
4412 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004413
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004414 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004415
Adam Sawickib8333fb2018-03-13 16:15:53 +01004416 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4417 {
4418 std::string desc1;
4419
4420 switch(threadCountIndex)
4421 {
4422 case 0:
4423 desc1 += "1_thread";
4424 config.ThreadCount = 1;
4425 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4426 break;
4427 case 1:
4428 desc1 += "16_threads+0%_common";
4429 config.ThreadCount = 16;
4430 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4431 break;
4432 case 2:
4433 desc1 += "16_threads+50%_common";
4434 config.ThreadCount = 16;
4435 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4436 break;
4437 case 3:
4438 desc1 += "16_threads+100%_common";
4439 config.ThreadCount = 16;
4440 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4441 break;
4442 case 4:
4443 desc1 += "2_threads+0%_common";
4444 config.ThreadCount = 2;
4445 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4446 break;
4447 case 5:
4448 desc1 += "2_threads+50%_common";
4449 config.ThreadCount = 2;
4450 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4451 break;
4452 case 6:
4453 desc1 += "2_threads+100%_common";
4454 config.ThreadCount = 2;
4455 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4456 break;
4457 default:
4458 assert(0);
4459 }
4460
4461 // 0 = buffers, 1 = images, 2 = buffers and images
4462 size_t buffersVsImagesCount = 2;
4463 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4464 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4465 {
4466 std::string desc2 = desc1;
4467 switch(buffersVsImagesIndex)
4468 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004469 case 0: desc2 += ",Buffers"; break;
4470 case 1: desc2 += ",Images"; break;
4471 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004472 default: assert(0);
4473 }
4474
4475 // 0 = small, 1 = large, 2 = small and large
4476 size_t smallVsLargeCount = 2;
4477 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4478 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4479 {
4480 std::string desc3 = desc2;
4481 switch(smallVsLargeIndex)
4482 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004483 case 0: desc3 += ",Small"; break;
4484 case 1: desc3 += ",Large"; break;
4485 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004486 default: assert(0);
4487 }
4488
4489 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4490 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4491 else
4492                    config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4493
4494 // 0 = varying sizes min...max, 1 = set of constant sizes
4495 size_t constantSizesCount = 1;
4496 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4497 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4498 {
4499 std::string desc4 = desc3;
4500 switch(constantSizesIndex)
4501 {
4502                case 0: desc4 += ",Varying_sizes"; break;
4503                case 1: desc4 += ",Constant_sizes"; break;
4504 default: assert(0);
4505 }
4506
4507 config.AllocationSizes.clear();
4508 // Buffers present
4509 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4510 {
4511 // Small
4512 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4513 {
4514 // Varying size
4515 if(constantSizesIndex == 0)
4516 config.AllocationSizes.push_back({4, 16, 1024});
4517 // Constant sizes
4518 else
4519 {
4520 config.AllocationSizes.push_back({1, 16, 16});
4521 config.AllocationSizes.push_back({1, 64, 64});
4522 config.AllocationSizes.push_back({1, 256, 256});
4523 config.AllocationSizes.push_back({1, 1024, 1024});
4524 }
4525 }
4526 // Large
4527 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4528 {
4529 // Varying size
4530 if(constantSizesIndex == 0)
4531 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4532 // Constant sizes
4533 else
4534 {
4535 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4536 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4537 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4538 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4539 }
4540 }
4541 }
4542 // Images present
4543 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4544 {
4545 // Small
4546 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4547 {
4548 // Varying size
4549 if(constantSizesIndex == 0)
4550 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4551 // Constant sizes
4552 else
4553 {
4554 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4555 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4556 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4557 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4558 }
4559 }
4560 // Large
4561 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4562 {
4563 // Varying size
4564 if(constantSizesIndex == 0)
4565 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4566 // Constant sizes
4567 else
4568 {
4569 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4570 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4571 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4572 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4573 }
4574 }
4575 }
4576
4577                // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations.
4578 size_t beginBytesToAllocateCount = 1;
4579 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4580 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4581 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4582 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4583 {
4584 std::string desc5 = desc4;
4585
4586 switch(beginBytesToAllocateIndex)
4587 {
4588 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004589 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004590 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4591 config.AdditionalOperationCount = 0;
4592 break;
4593 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004594 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004595 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4596 config.AdditionalOperationCount = 1024;
4597 break;
4598 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004599 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004600 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4601 config.AdditionalOperationCount = 1024;
4602 break;
4603 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004604 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004605 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4606 config.AdditionalOperationCount = 1024;
4607 break;
4608 default:
4609 assert(0);
4610 }
4611
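                    // Allocation strategy: 0 = best fit, 1 = worst fit, 2 = first fit.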
Adam Sawicki0667e332018-08-24 17:26:44 +02004612 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004613 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004614 std::string desc6 = desc5;
4615 switch(strategyIndex)
4616 {
4617 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004618 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004619 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4620 break;
4621 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004622 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004623 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4624 break;
4625 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004626 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004627 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4628 break;
4629 default:
4630 assert(0);
4631 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004632
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004633 desc6 += ',';
4634 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004635
4636 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004637
4638 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4639 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004640 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004641
4642 Result result{};
4643 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004644 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004645 if(file)
4646 {
4647 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4648 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004649 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004650 }
4651 }
4652 }
4653 }
4654 }
4655 }
4656}
4657
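// Pool-based counterpart of PerformMainTests: benchmarks TestPool_Benchmark over combinations
// of thread count, buffers vs. images, small vs. large sizes, varying vs. constant sizes and
// pool subscription level. The pool is sized to hold AVG_RESOURCES_PER_POOL resources of
// average size; results go to the same CSV file.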
4658static void PerformPoolTests(FILE* file)
4659{
4660 const size_t AVG_RESOURCES_PER_POOL = 300;
4661
4662 uint32_t repeatCount = 1;
4663 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4664
4665 PoolTestConfig config{};
4666 config.RandSeed = 2346343;
4667 config.FrameCount = 200;
4668 config.ItemsToMakeUnusedPercent = 2;
4669
4670 size_t threadCountCount = 1;
4671 switch(ConfigType)
4672 {
4673 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4674 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4675 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4676 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4677 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4678 default: assert(0);
4679 }
4680 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4681 {
4682 std::string desc1;
4683
4684 switch(threadCountIndex)
4685 {
4686 case 0:
4687 desc1 += "1_thread";
4688 config.ThreadCount = 1;
4689 break;
4690 case 1:
4691 desc1 += "16_threads";
4692 config.ThreadCount = 16;
4693 break;
4694 case 2:
4695 desc1 += "2_threads";
4696 config.ThreadCount = 2;
4697 break;
4698 default:
4699 assert(0);
4700 }
4701
4702 // 0 = buffers, 1 = images, 2 = buffers and images
4703 size_t buffersVsImagesCount = 2;
4704 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4705 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4706 {
4707 std::string desc2 = desc1;
4708 switch(buffersVsImagesIndex)
4709 {
4710 case 0: desc2 += " Buffers"; break;
4711 case 1: desc2 += " Images"; break;
4712 case 2: desc2 += " Buffers+Images"; break;
4713 default: assert(0);
4714 }
4715
4716 // 0 = small, 1 = large, 2 = small and large
4717 size_t smallVsLargeCount = 2;
4718 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4719 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4720 {
4721 std::string desc3 = desc2;
4722 switch(smallVsLargeIndex)
4723 {
4724 case 0: desc3 += " Small"; break;
4725 case 1: desc3 += " Large"; break;
4726 case 2: desc3 += " Small+Large"; break;
4727 default: assert(0);
4728 }
4729
4730 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4731 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4732 else
4733                    config.PoolSize = 4ull * 1024 * 1024; // 4 MB
4734
4735 // 0 = varying sizes min...max, 1 = set of constant sizes
4736 size_t constantSizesCount = 1;
4737 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4738 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4739 {
4740 std::string desc4 = desc3;
4741 switch(constantSizesIndex)
4742 {
4743 case 0: desc4 += " Varying_sizes"; break;
4744 case 1: desc4 += " Constant_sizes"; break;
4745 default: assert(0);
4746 }
4747
4748 config.AllocationSizes.clear();
4749 // Buffers present
4750 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4751 {
4752 // Small
4753 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4754 {
4755 // Varying size
4756 if(constantSizesIndex == 0)
4757 config.AllocationSizes.push_back({4, 16, 1024});
4758 // Constant sizes
4759 else
4760 {
4761 config.AllocationSizes.push_back({1, 16, 16});
4762 config.AllocationSizes.push_back({1, 64, 64});
4763 config.AllocationSizes.push_back({1, 256, 256});
4764 config.AllocationSizes.push_back({1, 1024, 1024});
4765 }
4766 }
4767 // Large
4768 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4769 {
4770 // Varying size
4771 if(constantSizesIndex == 0)
4772 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4773 // Constant sizes
4774 else
4775 {
4776 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4777 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4778 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4779 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4780 }
4781 }
4782 }
4783 // Images present
4784 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4785 {
4786 // Small
4787 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4788 {
4789 // Varying size
4790 if(constantSizesIndex == 0)
4791 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4792 // Constant sizes
4793 else
4794 {
4795 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4796 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4797 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4798 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4799 }
4800 }
4801 // Large
4802 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4803 {
4804 // Varying size
4805 if(constantSizesIndex == 0)
4806 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4807 // Constant sizes
4808 else
4809 {
4810 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4811 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4812 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4813 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4814 }
4815 }
4816 }
4817
4818 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4819 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4820
4821                    // Pool subscription as a percentage of its capacity (AVG_RESOURCES_PER_POOL): 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%.
4822 size_t subscriptionModeCount;
4823 switch(ConfigType)
4824 {
4825 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4826 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4827 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4828 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4829 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4830 default: assert(0);
4831 }
4832 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4833 {
4834 std::string desc5 = desc4;
4835
4836 switch(subscriptionModeIndex)
4837 {
4838 case 0:
4839 desc5 += " Subscription_66%";
4840 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4841 break;
4842 case 1:
4843 desc5 += " Subscription_133%";
4844 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4845 break;
4846 case 2:
4847 desc5 += " Subscription_100%";
4848 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4849 break;
4850 case 3:
4851 desc5 += " Subscription_33%";
4852 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4853 break;
4854 case 4:
4855 desc5 += " Subscription_166%";
4856 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4857 break;
4858 default:
4859 assert(0);
4860 }
4861
4862 config.TotalItemCount = config.UsedItemCountMax * 5;
4863 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4864
4865 const char* testDescription = desc5.c_str();
4866
4867 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4868 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004869 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004870
4871 PoolTestResult result{};
4872 g_MemoryAliasingWarningEnabled = false;
4873 TestPool_Benchmark(result, config);
4874 g_MemoryAliasingWarningEnabled = true;
4875 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4876 }
4877 }
4878 }
4879 }
4880 }
4881 }
4882}
4883
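// Smoke test of a custom pool created with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT: allocates a
// handful of buffers of assorted sizes (including a 1-byte allocation and one with an explicit
// alignment requirement), saves allocator stats to BuddyTest01.json, then destroys everything
// in random order.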
Adam Sawickia83793a2018-09-03 13:40:42 +02004884static void BasicTestBuddyAllocator()
4885{
4886 wprintf(L"Basic test buddy allocator\n");
4887
4888 RandomNumberGenerator rand{76543};
4889
4890 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4891 sampleBufCreateInfo.size = 1024; // Whatever.
4892 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4893
4894 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4895 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4896
4897 VmaPoolCreateInfo poolCreateInfo = {};
4898 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004899 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004900
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004901 // Deliberately adding 1023 to test usable size smaller than memory block size.
4902 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004903 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
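    // With the buddy algorithm each block is managed as a binary tree of power-of-two nodes,
    // so the usable size of this block is rounded down to 1 MB and every allocation is rounded
    // up to a power-of-two node size (e.g. a 300 KB request would occupy a 512 KB node).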
Adam Sawicki80927152018-09-07 17:27:23 +02004904 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004905
4906 VmaPool pool = nullptr;
4907 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004908 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004909
4910 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4911
4912 VmaAllocationCreateInfo allocCreateInfo = {};
4913 allocCreateInfo.pool = pool;
4914
4915 std::vector<BufferInfo> bufInfo;
4916 BufferInfo newBufInfo;
4917 VmaAllocationInfo allocInfo;
4918
4919 bufCreateInfo.size = 1024 * 256;
4920 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4921 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004922 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004923 bufInfo.push_back(newBufInfo);
4924
4925 bufCreateInfo.size = 1024 * 512;
4926 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4927 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004928 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004929 bufInfo.push_back(newBufInfo);
4930
4931 bufCreateInfo.size = 1024 * 128;
4932 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4933 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004934 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004935 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004936
4937 // Test very small allocation, smaller than minimum node size.
4938 bufCreateInfo.size = 1;
4939 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4940 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004941 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004942 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004943
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004944 // Test some small allocation with alignment requirement.
4945 {
4946 VkMemoryRequirements memReq;
4947 memReq.alignment = 256;
4948 memReq.memoryTypeBits = UINT32_MAX;
4949 memReq.size = 32;
4950
4951 newBufInfo.Buffer = VK_NULL_HANDLE;
4952 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4953 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004954 TEST(res == VK_SUCCESS);
4955 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004956 bufInfo.push_back(newBufInfo);
4957 }
4958
4959 //SaveAllocatorStatsToFile(L"TEST.json");
4960
Adam Sawicki21017c62018-09-07 15:26:59 +02004961 VmaPoolStats stats = {};
4962 vmaGetPoolStats(g_hAllocator, pool, &stats);
4963 int DBG = 0; // Set breakpoint here to inspect `stats`.
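    // Fields worth inspecting in VmaPoolStats: size, unusedSize, allocationCount, unusedRangeCount, unusedRangeSizeMax, blockCount.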
4964
Adam Sawicki80927152018-09-07 17:27:23 +02004965     // Allocate enough new buffers that some are sure to land in a second memory block.
4966 for(uint32_t i = 0; i < 32; ++i)
4967 {
4968 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4969 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4970 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004971 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004972 bufInfo.push_back(newBufInfo);
4973 }
4974
4975 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4976
Adam Sawickia83793a2018-09-03 13:40:42 +02004977 // Destroy the buffers in random order.
4978 while(!bufInfo.empty())
4979 {
4980 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4981 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4982 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4983 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4984 }
4985
4986 vmaDestroyPool(g_hAllocator, pool);
4987}
4988
Adam Sawickif2975342018-10-16 13:49:02 +02004989// Sanity check of the testing environment itself: fills GPU-only buffers with known data, then reads it back and validates it.
4990static void TestGpuData()
4991{
4992 RandomNumberGenerator rand = { 53434 };
4993
4994 std::vector<AllocInfo> allocInfo;
4995
4996 for(size_t i = 0; i < 100; ++i)
4997 {
4998 AllocInfo info = {};
4999
5000 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5001 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5002 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5003 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5004 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5005
5006 VmaAllocationCreateInfo allocCreateInfo = {};
5007 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5008
5009 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5010 TEST(res == VK_SUCCESS);
5011
5012 info.m_StartValue = rand.Generate();
5013
5014 allocInfo.push_back(std::move(info));
5015 }
5016
5017 UploadGpuData(allocInfo.data(), allocInfo.size());
5018
5019 ValidateGpuData(allocInfo.data(), allocInfo.size());
5020
5021 DestroyAllAllocations(allocInfo);
5022}
5023
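// Entry point for all tests. Runs the quick functional tests first, then the benchmarks that
// write Algorithms.csv and Results.csv. Note that the if(true) block at the top currently
// short-circuits this to run only the defragmentation tests and return early.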
Adam Sawickib8333fb2018-03-13 16:15:53 +01005024void Test()
5025{
5026 wprintf(L"TESTING:\n");
5027
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005028 if(true)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005029 {
5030 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005031 // ########################################
5032 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02005033
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005034 TestDefragmentationSimple();
5035 TestDefragmentationFull();
5036 TestDefragmentationGpu();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005037 TestDefragmentationWholePool();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005038 return;
5039 }
5040
Adam Sawickib8333fb2018-03-13 16:15:53 +01005041 // # Simple tests
5042
5043 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005044 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005045#if VMA_DEBUG_MARGIN
5046 TestDebugMargin();
5047#else
5048 TestPool_SameSize();
5049 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005050 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005051#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005052#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5053 TestAllocationsInitialization();
5054#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005055 TestMapping();
5056 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005057 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005058 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005059 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005060
Adam Sawicki4338f662018-09-07 14:12:37 +02005061 BasicTestBuddyAllocator();
5062
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005063 {
5064 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005065 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005066        TEST(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005067 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005068 fclose(file);
5069 }
5070
Adam Sawickib8333fb2018-03-13 16:15:53 +01005071 TestDefragmentationSimple();
5072 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005073 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005074 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005075
5076 // # Detailed tests
5077 FILE* file;
5078 fopen_s(&file, "Results.csv", "w");
5079    TEST(file != NULL);
5080
5081 WriteMainTestResultHeader(file);
5082 PerformMainTests(file);
5083 //PerformCustomMainTest(file);
5084
5085 WritePoolTestResultHeader(file);
5086 PerformPoolTests(file);
5087 //PerformCustomPoolTest(file);
5088
5089 fclose(file);
5090
5091 wprintf(L"Done.\n");
5092}
5093
Adam Sawickif1a793c2018-03-13 15:42:22 +01005094#endif // #ifdef _WIN32