#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

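// CONFIG_TYPE is a coarse "test size" preset. Within this file it is consumed through
// the ConfigType constant below, e.g. by GetAllocationStrategyCount() to decide how
// many allocation strategies to exercise; larger presets are presumably meant for
// longer, heavier benchmark runs.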
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
73
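// Illustrative only (hypothetical values, not used anywhere in this file): one way a
// Config for MainTest() could be filled, assuming a single thread and buffer-only
// allocations. Field names match the struct above.
//   Config cfg = {};
//   cfg.RandSeed = 123;
//   cfg.ThreadCount = 1;
//   cfg.BeginBytesToAllocate = 16ull * 1024 * 1024;
//   cfg.MaxBytesToAllocate = 64ull * 1024 * 1024;
//   cfg.AdditionalOperationCount = 1024;
//   cfg.MemUsageProbability[0] = 1; // Index 0 corresponds to VMA_MEMORY_USAGE_GPU_ONLY.
//   cfg.AllocationSizes.push_back({1, 64, 65536, 0, 0}); // Buffers of 64 B .. 64 KB.
//   cfg.FreeOrder = FREE_ORDER::RANDOM;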
74struct Result
75{
76 duration TotalTime;
77 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
78 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
79 VkDeviceSize TotalMemoryAllocated;
80 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
81};
82
83void TestDefragmentationSimple();
84void TestDefragmentationFull();
85
86struct PoolTestConfig
87{
88 uint32_t RandSeed;
89 uint32_t ThreadCount;
90 VkDeviceSize PoolSize;
91 uint32_t FrameCount;
92 uint32_t TotalItemCount;
93 // Range for number of items used in each frame.
94 uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused (and possibly make some others used) in each frame.
96 uint32_t ItemsToMakeUnusedPercent;
97 std::vector<AllocationSize> AllocationSizes;
98
99 VkDeviceSize CalcAvgResourceSize() const
100 {
101 uint32_t probabilitySum = 0;
102 VkDeviceSize sizeSum = 0;
103 for(size_t i = 0; i < AllocationSizes.size(); ++i)
104 {
105 const AllocationSize& allocSize = AllocationSizes[i];
106 if(allocSize.BufferSizeMax > 0)
107 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
108 else
109 {
110 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
111 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
112 }
113 probabilitySum += allocSize.Probability;
114 }
115 return sizeSum / probabilitySum;
116 }
117
118 bool UsesBuffers() const
119 {
120 for(size_t i = 0; i < AllocationSizes.size(); ++i)
121 if(AllocationSizes[i].BufferSizeMax > 0)
122 return true;
123 return false;
124 }
125
126 bool UsesImages() const
127 {
128 for(size_t i = 0; i < AllocationSizes.size(); ++i)
129 if(AllocationSizes[i].ImageSizeMax > 0)
130 return true;
131 return false;
132 }
133};
134
135struct PoolTestResult
136{
137 duration TotalTime;
138 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
139 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
140 size_t LostAllocationCount, LostAllocationTotalSize;
141 size_t FailedAllocationCount, FailedAllocationTotalSize;
142};
143
144static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
145
static uint32_t g_FrameIndex = 0;
147
struct BufferInfo
149{
150 VkBuffer Buffer = VK_NULL_HANDLE;
151 VmaAllocation Allocation = VK_NULL_HANDLE;
152};
153
static uint32_t GetAllocationStrategyCount()
155{
156 uint32_t strategyCount = 0;
157 switch(ConfigType)
158 {
159 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
160 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
161 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
162 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
163 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
164 default: assert(0);
165 }
166 return strategyCount;
167}
168
169static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
170{
171 switch(allocStrategy)
172 {
173 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
174 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
175 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
176 case 0: return "Default"; break;
177 default: assert(0); return "";
178 }
179}
180
static void InitResult(Result& outResult)
182{
183 outResult.TotalTime = duration::zero();
184 outResult.AllocationTimeMin = duration::max();
185 outResult.AllocationTimeAvg = duration::zero();
186 outResult.AllocationTimeMax = duration::min();
187 outResult.DeallocationTimeMin = duration::max();
188 outResult.DeallocationTimeAvg = duration::zero();
189 outResult.DeallocationTimeMax = duration::min();
190 outResult.TotalMemoryAllocated = 0;
191 outResult.FreeRangeSizeAvg = 0;
192 outResult.FreeRangeSizeMax = 0;
193}
194
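// TimeRegisterObj is a small RAII timer: it records the current time in its
// constructor and, in its destructor, adds the elapsed duration to the given sum and
// updates the given min/max. The derived *TimeRegisterObj classes below use it to
// time individual (de)allocation calls into a Result or PoolTestThreadResult.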
195class TimeRegisterObj
196{
197public:
198 TimeRegisterObj(duration& min, duration& sum, duration& max) :
199 m_Min(min),
200 m_Sum(sum),
201 m_Max(max),
202 m_TimeBeg(std::chrono::high_resolution_clock::now())
203 {
204 }
205
206 ~TimeRegisterObj()
207 {
208 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
209 m_Sum += d;
210 if(d < m_Min) m_Min = d;
211 if(d > m_Max) m_Max = d;
212 }
213
214private:
215 duration& m_Min;
216 duration& m_Sum;
217 duration& m_Max;
218 time_point m_TimeBeg;
219};
220
221struct PoolTestThreadResult
222{
223 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
224 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
225 size_t AllocationCount, DeallocationCount;
226 size_t LostAllocationCount, LostAllocationTotalSize;
227 size_t FailedAllocationCount, FailedAllocationTotalSize;
228};
229
230class AllocationTimeRegisterObj : public TimeRegisterObj
231{
232public:
233 AllocationTimeRegisterObj(Result& result) :
234 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
235 {
236 }
237};
238
239class DeallocationTimeRegisterObj : public TimeRegisterObj
240{
241public:
242 DeallocationTimeRegisterObj(Result& result) :
243 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
244 {
245 }
246};
247
248class PoolAllocationTimeRegisterObj : public TimeRegisterObj
249{
250public:
251 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
252 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
253 {
254 }
255};
256
257class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
258{
259public:
260 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
261 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
262 {
263 }
264};
265
static void CurrentTimeToStr(std::string& out)
267{
268 time_t rawTime; time(&rawTime);
269 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
270 char timeStr[128];
271 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
272 out = timeStr;
273}
274
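// MainTest is the multi-threaded stress test driven by Config. Each of
// config.ThreadCount threads allocates random buffers and images (sizes and memory
// usage drawn from the Config probabilities) up to its share of BeginBytesToAllocate,
// performs AdditionalOperationCount random allocate/free operations (optionally on
// allocations shared between threads), and finally frees everything in the configured
// FreeOrder while allocation/deallocation times are accumulated into outResult.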
VkResult MainTest(Result& outResult, const Config& config)
276{
277 assert(config.ThreadCount > 0);
278
279 InitResult(outResult);
280
281 RandomNumberGenerator mainRand{config.RandSeed};
282
283 time_point timeBeg = std::chrono::high_resolution_clock::now();
284
285 std::atomic<size_t> allocationCount = 0;
286 VkResult res = VK_SUCCESS;
287
288 uint32_t memUsageProbabilitySum =
289 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
290 config.MemUsageProbability[2] + config.MemUsageProbability[3];
291 assert(memUsageProbabilitySum > 0);
292
293 uint32_t allocationSizeProbabilitySum = std::accumulate(
294 config.AllocationSizes.begin(),
295 config.AllocationSizes.end(),
296 0u,
297 [](uint32_t sum, const AllocationSize& allocSize) {
298 return sum + allocSize.Probability;
299 });
300
301 struct Allocation
302 {
303 VkBuffer Buffer;
304 VkImage Image;
305 VmaAllocation Alloc;
306 };
307
308 std::vector<Allocation> commonAllocations;
309 std::mutex commonAllocationsMutex;
310
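    // Both Allocate and GetNextAllocationSize below make weighted random choices:
    // they draw a value in [0, probability sum) and subtract successive weights until
    // the draw is exhausted, selecting each entry with probability proportional to
    // its weight.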
311 auto Allocate = [&](
312 VkDeviceSize bufferSize,
313 const VkExtent2D imageExtent,
314 RandomNumberGenerator& localRand,
315 VkDeviceSize& totalAllocatedBytes,
316 std::vector<Allocation>& allocations) -> VkResult
317 {
318 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
319
320 uint32_t memUsageIndex = 0;
321 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
322 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
323 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
324
325 VmaAllocationCreateInfo memReq = {};
326 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

329 Allocation allocation = {};
330 VmaAllocationInfo allocationInfo;
331
332 // Buffer
333 if(bufferSize > 0)
334 {
335 assert(imageExtent.width == 0);
336 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
337 bufferInfo.size = bufferSize;
338 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
339
340 {
341 AllocationTimeRegisterObj timeRegisterObj{outResult};
342 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
343 }
344 }
345 // Image
346 else
347 {
348 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
349 imageInfo.imageType = VK_IMAGE_TYPE_2D;
350 imageInfo.extent.width = imageExtent.width;
351 imageInfo.extent.height = imageExtent.height;
352 imageInfo.extent.depth = 1;
353 imageInfo.mipLevels = 1;
354 imageInfo.arrayLayers = 1;
355 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
356 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
357 VK_IMAGE_TILING_OPTIMAL :
358 VK_IMAGE_TILING_LINEAR;
359 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
360 switch(memReq.usage)
361 {
362 case VMA_MEMORY_USAGE_GPU_ONLY:
363 switch(localRand.Generate() % 3)
364 {
365 case 0:
366 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
367 break;
368 case 1:
369 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
370 break;
371 case 2:
372 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
373 break;
374 }
375 break;
376 case VMA_MEMORY_USAGE_CPU_ONLY:
377 case VMA_MEMORY_USAGE_CPU_TO_GPU:
378 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
379 break;
380 case VMA_MEMORY_USAGE_GPU_TO_CPU:
381 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
382 break;
383 }
384 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
385 imageInfo.flags = 0;
386
387 {
388 AllocationTimeRegisterObj timeRegisterObj{outResult};
389 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
390 }
391 }
392
393 if(res == VK_SUCCESS)
394 {
395 ++allocationCount;
396 totalAllocatedBytes += allocationInfo.size;
397 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
398 if(useCommonAllocations)
399 {
400 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
401 commonAllocations.push_back(allocation);
402 }
403 else
404 allocations.push_back(allocation);
405 }
406 else
407 {
            TEST(0);
        }
410 return res;
411 };
412
413 auto GetNextAllocationSize = [&](
414 VkDeviceSize& outBufSize,
415 VkExtent2D& outImageSize,
416 RandomNumberGenerator& localRand)
417 {
418 outBufSize = 0;
419 outImageSize = {0, 0};
420
421 uint32_t allocSizeIndex = 0;
422 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
423 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
424 r -= config.AllocationSizes[allocSizeIndex++].Probability;
425
426 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
427 if(allocSize.BufferSizeMax > 0)
428 {
429 assert(allocSize.ImageSizeMax == 0);
430 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
431 outBufSize = allocSize.BufferSizeMin;
432 else
433 {
434 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
435 outBufSize = outBufSize / 16 * 16;
436 }
437 }
438 else
439 {
440 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
441 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
442 else
443 {
444 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
445 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
446 }
447 }
448 };
449
450 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
451 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
452
453 auto ThreadProc = [&](uint32_t randSeed) -> void
454 {
455 RandomNumberGenerator threadRand(randSeed);
456 VkDeviceSize threadTotalAllocatedBytes = 0;
457 std::vector<Allocation> threadAllocations;
458 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
459 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
460 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
461
462 // BEGIN ALLOCATIONS
463 for(;;)
464 {
465 VkDeviceSize bufferSize = 0;
466 VkExtent2D imageExtent = {};
467 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
468 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
469 threadBeginBytesToAllocate)
470 {
471 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
472 break;
473 }
474 else
475 break;
476 }
477
478 // ADDITIONAL ALLOCATIONS AND FREES
479 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
480 {
481 VkDeviceSize bufferSize = 0;
482 VkExtent2D imageExtent = {};
483 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
484
485 // true = allocate, false = free
486 bool allocate = threadRand.Generate() % 2 != 0;
487
488 if(allocate)
489 {
490 if(threadTotalAllocatedBytes +
491 bufferSize +
492 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
493 threadMaxBytesToAllocate)
494 {
495 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
496 break;
497 }
498 }
499 else
500 {
501 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
502 if(useCommonAllocations)
503 {
504 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
505 if(!commonAllocations.empty())
506 {
507 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
508 VmaAllocationInfo allocationInfo;
509 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
510 if(threadTotalAllocatedBytes >= allocationInfo.size)
511 {
512 DeallocationTimeRegisterObj timeRegisterObj{outResult};
513 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
514 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
515 else
516 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
517 threadTotalAllocatedBytes -= allocationInfo.size;
518 commonAllocations.erase(commonAllocations.begin() + indexToFree);
519 }
520 }
521 }
522 else
523 {
524 if(!threadAllocations.empty())
525 {
526 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
527 VmaAllocationInfo allocationInfo;
528 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
529 if(threadTotalAllocatedBytes >= allocationInfo.size)
530 {
531 DeallocationTimeRegisterObj timeRegisterObj{outResult};
532 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
533 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
534 else
535 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
536 threadTotalAllocatedBytes -= allocationInfo.size;
537 threadAllocations.erase(threadAllocations.begin() + indexToFree);
538 }
539 }
540 }
541 }
542 }
543
544 ++numThreadsReachedMaxAllocations;
545
546 WaitForSingleObject(threadsFinishEvent, INFINITE);
547
548 // DEALLOCATION
549 while(!threadAllocations.empty())
550 {
551 size_t indexToFree = 0;
552 switch(config.FreeOrder)
553 {
554 case FREE_ORDER::FORWARD:
555 indexToFree = 0;
556 break;
557 case FREE_ORDER::BACKWARD:
558 indexToFree = threadAllocations.size() - 1;
559 break;
560 case FREE_ORDER::RANDOM:
561 indexToFree = mainRand.Generate() % threadAllocations.size();
562 break;
563 }
564
565 {
566 DeallocationTimeRegisterObj timeRegisterObj{outResult};
567 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
568 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
569 else
570 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
571 }
572 threadAllocations.erase(threadAllocations.begin() + indexToFree);
573 }
574 };
575
576 uint32_t threadRandSeed = mainRand.Generate();
577 std::vector<std::thread> bkgThreads;
578 for(size_t i = 0; i < config.ThreadCount; ++i)
579 {
580 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
581 }
582
583 // Wait for threads reached max allocations
584 while(numThreadsReachedMaxAllocations < config.ThreadCount)
585 Sleep(0);
586
587 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
588 VmaStats vmaStats = {};
589 vmaCalculateStats(g_hAllocator, &vmaStats);
590 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
591 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
592 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
593
594 // Signal threads to deallocate
595 SetEvent(threadsFinishEvent);
596
597 // Wait for threads finished
598 for(size_t i = 0; i < bkgThreads.size(); ++i)
599 bkgThreads[i].join();
600 bkgThreads.clear();
601
602 CloseHandle(threadsFinishEvent);
603
604 // Deallocate remaining common resources
605 while(!commonAllocations.empty())
606 {
607 size_t indexToFree = 0;
608 switch(config.FreeOrder)
609 {
610 case FREE_ORDER::FORWARD:
611 indexToFree = 0;
612 break;
613 case FREE_ORDER::BACKWARD:
614 indexToFree = commonAllocations.size() - 1;
615 break;
616 case FREE_ORDER::RANDOM:
617 indexToFree = mainRand.Generate() % commonAllocations.size();
618 break;
619 }
620
621 {
622 DeallocationTimeRegisterObj timeRegisterObj{outResult};
623 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
624 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
625 else
626 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
627 }
628 commonAllocations.erase(commonAllocations.begin() + indexToFree);
629 }
630
631 if(allocationCount)
632 {
633 outResult.AllocationTimeAvg /= allocationCount;
634 outResult.DeallocationTimeAvg /= allocationCount;
635 }
636
637 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
638
639 return res;
640}
641
static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}
649
650struct AllocInfo
651{
652 VmaAllocation m_Allocation;
653 VkBuffer m_Buffer;
654 VkImage m_Image;
655 uint32_t m_StartValue;
656 union
657 {
658 VkBufferCreateInfo m_BufferInfo;
659 VkImageCreateInfo m_ImageInfo;
660 };
661};
662
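// StagingBufferCollection keeps a small cache of persistently mapped, host-visible
// staging buffers, up to MAX_TOTAL_SIZE in total. AcquireBuffer() reuses the smallest
// free buffer that fits, allocates a new one while under the budget, or frees unused
// buffers and retries; ReleaseAllBuffers() marks everything reusable again once the
// commands that used the buffers have completed.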
class StagingBufferCollection
664{
665public:
666 StagingBufferCollection() { }
667 ~StagingBufferCollection();
668 // Returns false if maximum total size of buffers would be exceeded.
669 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
670 void ReleaseAllBuffers();
671
672private:
673 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
674 struct BufInfo
675 {
676 VmaAllocation Allocation = VK_NULL_HANDLE;
677 VkBuffer Buffer = VK_NULL_HANDLE;
678 VkDeviceSize Size = VK_WHOLE_SIZE;
679 void* MappedPtr = nullptr;
680 bool Used = false;
681 };
682 std::vector<BufInfo> m_Bufs;
683 // Including both used and unused.
684 VkDeviceSize m_TotalSize = 0;
685};
686
687StagingBufferCollection::~StagingBufferCollection()
688{
689 for(size_t i = m_Bufs.size(); i--; )
690 {
691 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
692 }
693}
694
695bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
696{
697 assert(size <= MAX_TOTAL_SIZE);
698
699 // Try to find existing unused buffer with best size.
700 size_t bestIndex = SIZE_MAX;
701 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
702 {
703 BufInfo& currBufInfo = m_Bufs[i];
704 if(!currBufInfo.Used && currBufInfo.Size >= size &&
705 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
706 {
707 bestIndex = i;
708 }
709 }
710
711 if(bestIndex != SIZE_MAX)
712 {
713 m_Bufs[bestIndex].Used = true;
714 outBuffer = m_Bufs[bestIndex].Buffer;
715 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
716 return true;
717 }
718
719 // Allocate new buffer with requested size.
720 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
721 {
722 BufInfo bufInfo;
723 bufInfo.Size = size;
724 bufInfo.Used = true;
725
726 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
727 bufCreateInfo.size = size;
728 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
729
730 VmaAllocationCreateInfo allocCreateInfo = {};
731 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
732 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
733
734 VmaAllocationInfo allocInfo;
735 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
736 bufInfo.MappedPtr = allocInfo.pMappedData;
737 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
738
739 outBuffer = bufInfo.Buffer;
740 outMappedPtr = bufInfo.MappedPtr;
741
742 m_Bufs.push_back(std::move(bufInfo));
743
744 m_TotalSize += size;
745
746 return true;
747 }
748
749 // There are some unused but smaller buffers: Free them and try again.
750 bool hasUnused = false;
751 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
752 {
753 if(!m_Bufs[i].Used)
754 {
755 hasUnused = true;
756 break;
757 }
758 }
759 if(hasUnused)
760 {
761 for(size_t i = m_Bufs.size(); i--; )
762 {
763 if(!m_Bufs[i].Used)
764 {
765 m_TotalSize -= m_Bufs[i].Size;
766 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
767 m_Bufs.erase(m_Bufs.begin() + i);
768 }
769 }
770
771 return AcquireBuffer(size, outBuffer, outMappedPtr);
772 }
773
774 return false;
775}
776
777void StagingBufferCollection::ReleaseAllBuffers()
778{
779 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
780 {
781 m_Bufs[i].Used = false;
782 }
783}
784
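// UploadGpuData fills each destination buffer with a deterministic pattern
// (consecutive uint32_t values starting at m_StartValue): the pattern is written into
// a mapped staging buffer and copied with vkCmdCopyBuffer inside a single-time command
// buffer. ValidateGpuData below performs the reverse copy and checks the pattern.
// Images are not handled yet.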
785static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
786{
787 StagingBufferCollection stagingBufs;
788
789 bool cmdBufferStarted = false;
790 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
791 {
792 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
793 if(currAllocInfo.m_Buffer)
794 {
795 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
796
797 VkBuffer stagingBuf = VK_NULL_HANDLE;
798 void* stagingBufMappedPtr = nullptr;
799 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
800 {
801 TEST(cmdBufferStarted);
802 EndSingleTimeCommands();
803 stagingBufs.ReleaseAllBuffers();
804 cmdBufferStarted = false;
805
806 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
807 TEST(ok);
808 }
809
810 // Fill staging buffer.
811 {
812 assert(size % sizeof(uint32_t) == 0);
813 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
814 uint32_t val = currAllocInfo.m_StartValue;
815 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
816 {
817 *stagingValPtr = val;
818 ++stagingValPtr;
819 ++val;
820 }
821 }
822
823 // Issue copy command from staging buffer to destination buffer.
824 if(!cmdBufferStarted)
825 {
826 cmdBufferStarted = true;
827 BeginSingleTimeCommands();
828 }
829
830 VkBufferCopy copy = {};
831 copy.srcOffset = 0;
832 copy.dstOffset = 0;
833 copy.size = size;
834 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
835 }
836 else
837 {
838 TEST(0 && "Images not currently supported.");
839 }
840 }
841
842 if(cmdBufferStarted)
843 {
844 EndSingleTimeCommands();
845 stagingBufs.ReleaseAllBuffers();
846 }
847}
848
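// ValidateGpuData copies each buffer back into staging memory with vkCmdCopyBuffer
// and, after the commands have completed in EndSingleTimeCommands(), verifies the
// expected value pattern; any mismatch fails the test via TEST(valid).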
849static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
850{
851 StagingBufferCollection stagingBufs;
852
853 bool cmdBufferStarted = false;
854 size_t validateAllocIndexOffset = 0;
855 std::vector<void*> validateStagingBuffers;
856 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
857 {
858 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
859 if(currAllocInfo.m_Buffer)
860 {
861 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
862
863 VkBuffer stagingBuf = VK_NULL_HANDLE;
864 void* stagingBufMappedPtr = nullptr;
865 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
866 {
867 TEST(cmdBufferStarted);
868 EndSingleTimeCommands();
869 cmdBufferStarted = false;
870
871 for(size_t validateIndex = 0;
872 validateIndex < validateStagingBuffers.size();
873 ++validateIndex)
874 {
875 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
876 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
877 TEST(validateSize % sizeof(uint32_t) == 0);
878 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
879 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
880 bool valid = true;
881 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
882 {
883 if(*stagingValPtr != val)
884 {
885 valid = false;
886 break;
887 }
888 ++stagingValPtr;
889 ++val;
890 }
891 TEST(valid);
892 }
893
894 stagingBufs.ReleaseAllBuffers();
895
896 validateAllocIndexOffset = allocInfoIndex;
897 validateStagingBuffers.clear();
898
899 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
900 TEST(ok);
901 }
902
903 // Issue copy command from staging buffer to destination buffer.
904 if(!cmdBufferStarted)
905 {
906 cmdBufferStarted = true;
907 BeginSingleTimeCommands();
908 }
909
910 VkBufferCopy copy = {};
911 copy.srcOffset = 0;
912 copy.dstOffset = 0;
913 copy.size = size;
914 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
915
            // Save mapped pointer for later validation.
917 validateStagingBuffers.push_back(stagingBufMappedPtr);
918 }
919 else
920 {
921 TEST(0 && "Images not currently supported.");
922 }
923 }
924
925 if(cmdBufferStarted)
926 {
927 EndSingleTimeCommands();
928
929 for(size_t validateIndex = 0;
930 validateIndex < validateStagingBuffers.size();
931 ++validateIndex)
932 {
933 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
934 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
935 TEST(validateSize % sizeof(uint32_t) == 0);
936 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
937 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
938 bool valid = true;
939 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
940 {
941 if(*stagingValPtr != val)
942 {
943 valid = false;
944 break;
945 }
946 ++stagingValPtr;
947 ++val;
948 }
949 TEST(valid);
950 }
951
952 stagingBufs.ReleaseAllBuffers();
953 }
954}
955
static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
957{
958 outMemReq = {};
959 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
960 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
961}
962
963static void CreateBuffer(
964 VmaPool pool,
965 const VkBufferCreateInfo& bufCreateInfo,
966 bool persistentlyMapped,
967 AllocInfo& outAllocInfo)
968{
969 outAllocInfo = {};
970 outAllocInfo.m_BufferInfo = bufCreateInfo;
971
972 VmaAllocationCreateInfo allocCreateInfo = {};
973 allocCreateInfo.pool = pool;
974 if(persistentlyMapped)
975 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
976
977 VmaAllocationInfo vmaAllocInfo = {};
978 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
979
980 // Setup StartValue and fill.
981 {
982 outAllocInfo.m_StartValue = (uint32_t)rand();
983 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
986 {
987 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
988 }
989
990 uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
993 data[i] = value++;
994
995 if(!persistentlyMapped)
996 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
997 }
998}
999
1000static void CreateAllocation(AllocInfo& outAllocation, VmaAllocator allocator)
1001{
1002 outAllocation.m_Allocation = nullptr;
1003 outAllocation.m_Buffer = nullptr;
1004 outAllocation.m_Image = nullptr;
1005 outAllocation.m_StartValue = (uint32_t)rand();
1006
1007 VmaAllocationCreateInfo vmaMemReq;
1008 GetMemReq(vmaMemReq);
1009
1010 VmaAllocationInfo allocInfo;
1011
1012 const bool isBuffer = true;//(rand() & 0x1) != 0;
1013 const bool isLarge = (rand() % 16) == 0;
1014 if(isBuffer)
1015 {
1016 const uint32_t bufferSize = isLarge ?
1017 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1018 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1019
1020 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1021 bufferInfo.size = bufferSize;
1022 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1023
1024 VkResult res = vmaCreateBuffer(allocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
1025 outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
1028 else
1029 {
1030 const uint32_t imageSizeX = isLarge ?
1031 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1032 rand() % 1024 + 1; // 1 ... 1024
1033 const uint32_t imageSizeY = isLarge ?
1034 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1035 rand() % 1024 + 1; // 1 ... 1024
1036
1037 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1038 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1039 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1040 imageInfo.extent.width = imageSizeX;
1041 imageInfo.extent.height = imageSizeY;
1042 imageInfo.extent.depth = 1;
1043 imageInfo.mipLevels = 1;
1044 imageInfo.arrayLayers = 1;
1045 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1046 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1047 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1048 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1049
1050 VkResult res = vmaCreateImage(allocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
1051 outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }
1054
1055 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1056 if(allocInfo.pMappedData == nullptr)
1057 {
1058 VkResult res = vmaMapMemory(allocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }
1061
1062 uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1065 data[i] = value++;
1066
1067 if(allocInfo.pMappedData == nullptr)
1068 vmaUnmapMemory(allocator, outAllocation.m_Allocation);
1069}
1070
1071static void DestroyAllocation(const AllocInfo& allocation)
1072{
1073 if(allocation.m_Buffer)
1074 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1075 else
1076 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1077}
1078
1079static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1080{
1081 for(size_t i = allocations.size(); i--; )
1082 DestroyAllocation(allocations[i]);
1083 allocations.clear();
1084}
1085
1086static void ValidateAllocationData(const AllocInfo& allocation)
1087{
1088 VmaAllocationInfo allocInfo;
1089 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1090
1091 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1092 if(allocInfo.pMappedData == nullptr)
1093 {
1094 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }
1097
1098 uint32_t value = allocation.m_StartValue;
1099 bool ok = true;
1100 size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1103 {
1104 if(data[i] != value++)
1105 {
1106 ok = false;
1107 break;
1108 }
1109 }
    TEST(ok);

1112 if(allocInfo.pMappedData == nullptr)
1113 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1114}
1115
1116static void RecreateAllocationResource(AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 if(allocation.m_Buffer)
1122 {
1123 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);
1124
1125 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

1128 // Just to silence validation layer warnings.
1129 VkMemoryRequirements vkMemReq;
1130 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

1133 res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
1136 else
1137 {
1138 vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);
1139
1140 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

1143 // Just to silence validation layer warnings.
1144 VkMemoryRequirements vkMemReq;
1145 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1146
1147 res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
1150}
1151
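// Defragment() wraps vmaDefragment() for an array of AllocInfo. For every allocation
// reported as moved (allocChanged[i] != VK_FALSE) the Vulkan buffer or image is
// recreated and bound to the allocation's new memory by RecreateAllocationResource(),
// since defragmentation moves only the memory, not the resources bound to it.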
1152static void Defragment(AllocInfo* allocs, size_t allocCount,
1153 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1154 VmaDefragmentationStats* defragmentationStats = nullptr)
1155{
1156 std::vector<VmaAllocation> vmaAllocs(allocCount);
1157 for(size_t i = 0; i < allocCount; ++i)
1158 vmaAllocs[i] = allocs[i].m_Allocation;
1159
1160 std::vector<VkBool32> allocChanged(allocCount);
1161
1162 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1163 defragmentationInfo, defragmentationStats) );
1164
1165 for(size_t i = 0; i < allocCount; ++i)
1166 {
1167 if(allocChanged[i])
1168 {
1169 RecreateAllocationResource(allocs[i]);
1170 }
1171 }
1172}
1173
1174static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1175{
1176 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1177 ValidateAllocationData(allocInfo);
1178 });
1179}
1180
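// TestDefragmentationSimple creates a dedicated pool and runs three scenarios, each
// once with unmapped and once with persistently mapped buffers:
// 1. Free every other fixed-size buffer and expect defragmentation to free >= 1 block.
// 2. Same layout, but defragment one allocation / BUF_SIZE bytes at a time.
// 3. Buffers of random size, some of them treated as non-movable; validated only.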
1181void TestDefragmentationSimple()
1182{
1183 wprintf(L"Test defragmentation simple\n");
1184
1185 RandomNumberGenerator rand(667);
1186
1187 const VkDeviceSize BUF_SIZE = 0x10000;
1188 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1189
1190 const VkDeviceSize MIN_BUF_SIZE = 32;
1191 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1192 auto RandomBufSize = [&]() -> VkDeviceSize {
1193 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1194 };
1195
1196 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1197 bufCreateInfo.size = BUF_SIZE;
1198 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1199
1200 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1201 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1202
1203 uint32_t memTypeIndex = UINT32_MAX;
1204 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1205
1206 VmaPoolCreateInfo poolCreateInfo = {};
1207 poolCreateInfo.blockSize = BLOCK_SIZE;
1208 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1209
1210 VmaPool pool;
1211 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1212
1213 std::vector<AllocInfo> allocations;
1214
1215 // persistentlyMappedOption = 0 - not persistently mapped.
1216 // persistentlyMappedOption = 1 - persistently mapped.
1217 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1218 {
1219 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1220 const bool persistentlyMapped = persistentlyMappedOption != 0;
1221
1222 // # Test 1
1223 // Buffers of fixed size.
1224 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1225 // Expected result: at least 1 block freed.
1226 {
1227 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1228 {
1229 AllocInfo allocInfo;
1230 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1231 allocations.push_back(allocInfo);
1232 }
1233
1234 for(size_t i = 1; i < allocations.size(); ++i)
1235 {
1236 DestroyAllocation(allocations[i]);
1237 allocations.erase(allocations.begin() + i);
1238 }
1239
1240 VmaDefragmentationStats defragStats;
1241 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

1245 ValidateAllocationsData(allocations.data(), allocations.size());
1246
1247 DestroyAllAllocations(allocations);
1248 }
1249
1250 // # Test 2
1251 // Buffers of fixed size.
1252 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
        // Expected result: Each of 4 iterations makes some progress.
1254 {
1255 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1256 {
1257 AllocInfo allocInfo;
1258 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1259 allocations.push_back(allocInfo);
1260 }
1261
1262 for(size_t i = 1; i < allocations.size(); ++i)
1263 {
1264 DestroyAllocation(allocations[i]);
1265 allocations.erase(allocations.begin() + i);
1266 }
1267
1268 VmaDefragmentationInfo defragInfo = {};
1269 defragInfo.maxAllocationsToMove = 1;
1270 defragInfo.maxBytesToMove = BUF_SIZE;
1271
1272 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1273 {
1274 VmaDefragmentationStats defragStats;
1275 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }
1278
1279 ValidateAllocationsData(allocations.data(), allocations.size());
1280
1281 DestroyAllAllocations(allocations);
1282 }
1283
1284 // # Test 3
1285 // Buffers of variable size.
1286 // Create a number of buffers. Remove some percent of them.
1287 // Defragment while having some percent of them unmovable.
1288 // Expected result: Just simple validation.
1289 {
1290 for(size_t i = 0; i < 100; ++i)
1291 {
1292 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1293 localBufCreateInfo.size = RandomBufSize();
1294
1295 AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
1297 allocations.push_back(allocInfo);
1298 }
1299
1300 const uint32_t percentToDelete = 60;
1301 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1302 for(size_t i = 0; i < numberToDelete; ++i)
1303 {
1304 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1305 DestroyAllocation(allocations[indexToDelete]);
1306 allocations.erase(allocations.begin() + indexToDelete);
1307 }
1308
1309 // Non-movable allocations will be at the beginning of allocations array.
1310 const uint32_t percentNonMovable = 20;
1311 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1312 for(size_t i = 0; i < numberNonMovable; ++i)
1313 {
1314 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1315 if(indexNonMovable != i)
1316 std::swap(allocations[i], allocations[indexNonMovable]);
1317 }
1318
1319 VmaDefragmentationStats defragStats;
1320 Defragment(
1321 allocations.data() + numberNonMovable,
1322 allocations.size() - numberNonMovable,
1323 nullptr, &defragStats);
1324
1325 ValidateAllocationsData(allocations.data(), allocations.size());
1326
1327 DestroyAllAllocations(allocations);
1328 }
1329 }
1330
1331 vmaDestroyPool(g_hAllocator, pool);
1332}
1333
1334void TestDefragmentationFull()
1335{
1336 std::vector<AllocInfo> allocations;
1337
1338 // Create initial allocations.
1339 for(size_t i = 0; i < 400; ++i)
1340 {
1341 AllocInfo allocation;
1342 CreateAllocation(allocation, g_hAllocator);
1343 allocations.push_back(allocation);
1344 }
1345
1346 // Delete random allocations
1347 const size_t allocationsToDeletePercent = 80;
1348 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1349 for(size_t i = 0; i < allocationsToDelete; ++i)
1350 {
1351 size_t index = (size_t)rand() % allocations.size();
1352 DestroyAllocation(allocations[index]);
1353 allocations.erase(allocations.begin() + index);
1354 }
1355
1356 for(size_t i = 0; i < allocations.size(); ++i)
1357 ValidateAllocationData(allocations[i]);
1358
    //SaveAllocatorStatsToFile(L"Before.csv");

1361 {
1362 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1363 for(size_t i = 0; i < allocations.size(); ++i)
1364 vmaAllocations[i] = allocations[i].m_Allocation;
1365
1366 const size_t nonMovablePercent = 0;
1367 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1368 for(size_t i = 0; i < nonMovableCount; ++i)
1369 {
1370 size_t index = (size_t)rand() % vmaAllocations.size();
1371 vmaAllocations.erase(vmaAllocations.begin() + index);
1372 }
1373
1374 const uint32_t defragCount = 1;
1375 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1376 {
1377 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1378
1379 VmaDefragmentationInfo defragmentationInfo;
1380 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1381 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1382
1383 wprintf(L"Defragmentation #%u\n", defragIndex);
1384
1385 time_point begTime = std::chrono::high_resolution_clock::now();
1386
1387 VmaDefragmentationStats stats;
1388 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

1391 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1392
1393 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1394 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1395 wprintf(L"Time: %.2f s\n", defragmentDuration);
1396
1397 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1398 {
1399 if(allocationsChanged[i])
1400 {
1401 RecreateAllocationResource(allocations[i]);
1402 }
1403 }
1404
1405 for(size_t i = 0; i < allocations.size(); ++i)
1406 ValidateAllocationData(allocations[i]);
1407
            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
1412 }
1413
1414 // Destroy all remaining allocations.
1415 DestroyAllAllocations(allocations);
1416}
1417
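// TestUserData exercises pUserData for both dedicated and sub-allocated buffers: an
// opaque pointer is stored and read back unchanged, and with
// VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT a string is copied by the allocator,
// survives freeing the caller's copy, and can later be replaced or cleared.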
1418static void TestUserData()
1419{
1420 VkResult res;
1421
1422 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1423 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1424 bufCreateInfo.size = 0x10000;
1425
1426 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1427 {
1428 // Opaque pointer
1429 {
1430
1431 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1432 void* pointerToSomething = &res;
1433
1434 VmaAllocationCreateInfo allocCreateInfo = {};
1435 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1436 allocCreateInfo.pUserData = numberAsPointer;
1437 if(testIndex == 1)
1438 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1439
1440 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1441 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
            TEST(res == VK_SUCCESS);
            TEST(allocInfo.pUserData == numberAsPointer);

1445 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
            TEST(allocInfo.pUserData == numberAsPointer);

1448 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1449 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
            TEST(allocInfo.pUserData == pointerToSomething);

1452 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1453 }
1454
1455 // String
1456 {
1457 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1458 const char* name2 = "2";
1459 const size_t name1Len = strlen(name1);
1460
1461 char* name1Buf = new char[name1Len + 1];
1462 strcpy_s(name1Buf, name1Len + 1, name1);
1463
1464 VmaAllocationCreateInfo allocCreateInfo = {};
1465 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1466 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1467 allocCreateInfo.pUserData = name1Buf;
1468 if(testIndex == 1)
1469 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1470
1471 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1472 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
            TEST(res == VK_SUCCESS);
            TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
            TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);

1477 delete[] name1Buf;
1478
1479 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
            TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);

1482 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1483 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
            TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);

1486 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1487 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
            TEST(allocInfo.pUserData == nullptr);

1490 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1491 }
1492 }
1493}
1494
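// TestMemoryRequirements creates the same small buffer with different
// VmaAllocationCreateInfo settings (no requirements, usage only, required/preferred
// flags, explicit memoryTypeBits) and checks that the selected memory type has the
// expected property flags.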
1495static void TestMemoryRequirements()
1496{
1497 VkResult res;
1498 VkBuffer buf;
1499 VmaAllocation alloc;
1500 VmaAllocationInfo allocInfo;
1501
1502 const VkPhysicalDeviceMemoryProperties* memProps;
1503 vmaGetMemoryProperties(g_hAllocator, &memProps);
1504
1505 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1506 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1507 bufInfo.size = 128;
1508
1509 VmaAllocationCreateInfo allocCreateInfo = {};
1510
1511 // No requirements.
1512 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);
1515
1516 // Usage.
1517 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1518 allocCreateInfo.requiredFlags = 0;
1519 allocCreateInfo.preferredFlags = 0;
1520 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1521
1522 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);
1526
1527 // Required flags, preferred flags.
1528 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1529 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1530 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1531 allocCreateInfo.memoryTypeBits = 0;
1532
1533 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);
1538
1539 // memoryTypeBits.
1540 const uint32_t memType = allocInfo.memoryType;
1541 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1542 allocCreateInfo.requiredFlags = 0;
1543 allocCreateInfo.preferredFlags = 0;
1544 allocCreateInfo.memoryTypeBits = 1u << memType;
1545
1546 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(allocInfo.memoryType == memType);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);
1550
1551}
1552
1553static void TestBasics()
1554{
1555 VkResult res;
1556
1557 TestMemoryRequirements();
1558
1559 // Lost allocation
1560 {
1561 VmaAllocation alloc = VK_NULL_HANDLE;
1562 vmaCreateLostAllocation(g_hAllocator, &alloc);
        TEST(alloc != VK_NULL_HANDLE);

1565 VmaAllocationInfo allocInfo;
1566 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
        TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
        TEST(allocInfo.size == 0);

1570 vmaFreeMemory(g_hAllocator, alloc);
1571 }
1572
1573 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1574 {
1575 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1576 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1577 bufCreateInfo.size = 128;
1578
1579 VmaAllocationCreateInfo allocCreateInfo = {};
1580 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1581 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1582
1583 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1584 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

1587 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1588
1589 // Same with OWN_MEMORY.
1590 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1591
1592 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

1595 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1596 }
1597
1598 TestUserData();
1599}
1600
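// TestHeapSizeLimit creates a separate VmaAllocator with every heap capped at 1 GB via
// pHeapSizeLimit, fills that budget with dedicated and pool allocations, and then
// verifies that one more allocation fails with VK_ERROR_OUT_OF_DEVICE_MEMORY.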
1601void TestHeapSizeLimit()
1602{
1603 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1604 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1605
1606 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1607 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1608 {
1609 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1610 }
1611
1612 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1613 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1614 allocatorCreateInfo.device = g_hDevice;
1615 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1616
1617 VmaAllocator hAllocator;
1618 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

1621 struct Item
1622 {
1623 VkBuffer hBuf;
1624 VmaAllocation hAlloc;
1625 };
1626 std::vector<Item> items;
1627
1628 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1629 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1630
1631 // 1. Allocate two dedicated-memory buffers, each half the size of BLOCK_SIZE.
1632 VmaAllocationInfo ownAllocInfo;
1633 {
1634 VmaAllocationCreateInfo allocCreateInfo = {};
1635 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1636 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1637
1638 bufCreateInfo.size = BLOCK_SIZE / 2;
1639
1640 for(size_t i = 0; i < 2; ++i)
1641 {
1642 Item item;
1643 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001644 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001645 items.push_back(item);
1646 }
1647 }
1648
1649 // Create a pool to make sure further allocations come from this same memory type.
1650 VmaPoolCreateInfo poolCreateInfo = {};
1651 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1652 poolCreateInfo.blockSize = BLOCK_SIZE;
1653
1654 VmaPool hPool;
1655 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001656 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001657
1658 // 2. Allocate normal buffers from all the remaining memory.
1659 {
1660 VmaAllocationCreateInfo allocCreateInfo = {};
1661 allocCreateInfo.pool = hPool;
1662
1663 bufCreateInfo.size = BLOCK_SIZE / 2;
1664
1665 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
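        // The two dedicated allocations above already consume one BLOCK_SIZE worth of the heap,
        // so (HEAP_SIZE_LIMIT / BLOCK_SIZE - 1) pool blocks remain. Two half-block buffers fill
        // each of them, so these allocations together use up the limited heap exactly.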
1666 for(size_t i = 0; i < bufCount; ++i)
1667 {
1668 Item item;
1669 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001670 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001671 items.push_back(item);
1672 }
1673 }
1674
1675 // 3. Allocation of one more (even small) buffer should fail.
1676 {
1677 VmaAllocationCreateInfo allocCreateInfo = {};
1678 allocCreateInfo.pool = hPool;
1679
1680 bufCreateInfo.size = 128;
1681
1682 VkBuffer hBuf;
1683 VmaAllocation hAlloc;
1684 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001685 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001686 }
1687
1688 // Destroy everything.
1689 for(size_t i = items.size(); i--; )
1690 {
1691 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
1692 }
1693
1694 vmaDestroyPool(hAllocator, hPool);
1695
1696 vmaDestroyAllocator(hAllocator);
1697}
1698
Adam Sawicki212a4a62018-06-14 15:44:45 +02001699#if VMA_DEBUG_MARGIN
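// Checks that VMA_DEBUG_MARGIN bytes of free space are preserved before each allocation
// (including at the beginning of a block) and that vmaCheckCorruption() then succeeds.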
Adam Sawicki73b16652018-06-11 16:39:25 +02001700static void TestDebugMargin()
1701{
1702 if(VMA_DEBUG_MARGIN == 0)
1703 {
1704 return;
1705 }
1706
1707 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02001708 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02001709
1710 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02001711 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02001712
1713 // Create a few buffers of different sizes.
1714 const size_t BUF_COUNT = 10;
1715 BufferInfo buffers[BUF_COUNT];
1716 VmaAllocationInfo allocInfo[BUF_COUNT];
1717 for(size_t i = 0; i < BUF_COUNT; ++i)
1718 {
1719 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02001720 // Last one will be mapped.
1721 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02001722
1723 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001724 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02001725 // The margin is also preserved at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001726 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001727
1728 if(i == BUF_COUNT - 1)
1729 {
1730 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001731 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001732 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
1733 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
1734 }
Adam Sawicki73b16652018-06-11 16:39:25 +02001735 }
1736
1737 // Check that their offsets preserve the margin between them.
1738 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
1739 {
1740 if(lhs.deviceMemory != rhs.deviceMemory)
1741 {
1742 return lhs.deviceMemory < rhs.deviceMemory;
1743 }
1744 return lhs.offset < rhs.offset;
1745 });
1746 for(size_t i = 1; i < BUF_COUNT; ++i)
1747 {
1748 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
1749 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001750 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02001751 }
1752 }
1753
Adam Sawicki212a4a62018-06-14 15:44:45 +02001754 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001755 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001756
Adam Sawicki73b16652018-06-11 16:39:25 +02001757 // Destroy all buffers.
1758 for(size_t i = BUF_COUNT; i--; )
1759 {
1760 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
1761 }
1762}
Adam Sawicki212a4a62018-06-14 15:44:45 +02001763#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001764
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001765static void TestLinearAllocator()
1766{
1767 wprintf(L"Test linear allocator\n");
1768
1769 RandomNumberGenerator rand{645332};
1770
1771 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1772 sampleBufCreateInfo.size = 1024; // Whatever.
1773 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1774
1775 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
1776 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1777
1778 VmaPoolCreateInfo poolCreateInfo = {};
1779 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001780 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001781
Adam Sawickiee082772018-06-20 17:45:49 +02001782 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001783 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
1784 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
1785
1786 VmaPool pool = nullptr;
1787 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001788 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001789
1790 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
1791
1792 VmaAllocationCreateInfo allocCreateInfo = {};
1793 allocCreateInfo.pool = pool;
1794
1795 constexpr size_t maxBufCount = 100;
1796 std::vector<BufferInfo> bufInfo;
1797
1798 constexpr VkDeviceSize bufSizeMin = 16;
1799 constexpr VkDeviceSize bufSizeMax = 1024;
1800 VmaAllocationInfo allocInfo;
1801 VkDeviceSize prevOffset = 0;
1802
1803 // Test one-time free.
1804 for(size_t i = 0; i < 2; ++i)
1805 {
1806 // Allocate a number of buffers of varying sizes that surely fit into this block.
1807 VkDeviceSize bufSumSize = 0;
1808 for(size_t i = 0; i < maxBufCount; ++i)
1809 {
1810 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1811 BufferInfo newBufInfo;
1812 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1813 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001814 TEST(res == VK_SUCCESS);
1815 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001816 bufInfo.push_back(newBufInfo);
1817 prevOffset = allocInfo.offset;
1818 bufSumSize += bufCreateInfo.size;
1819 }
1820
1821 // Validate pool stats.
1822 VmaPoolStats stats;
1823 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001824 TEST(stats.size == poolCreateInfo.blockSize);
1825 TEST(stats.unusedSize <= poolCreateInfo.blockSize - bufSumSize); // Allocation sizes may be rounded up, so unused space can only be smaller.
1826 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001827
1828 // Destroy the buffers in random order.
1829 while(!bufInfo.empty())
1830 {
1831 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
1832 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
1833 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1834 bufInfo.erase(bufInfo.begin() + indexToDestroy);
1835 }
1836 }
1837
1838 // Test stack.
1839 {
1840 // Allocate a number of buffers of varying sizes that surely fit into this block.
1841 for(size_t i = 0; i < maxBufCount; ++i)
1842 {
1843 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1844 BufferInfo newBufInfo;
1845 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1846 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001847 TEST(res == VK_SUCCESS);
1848 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001849 bufInfo.push_back(newBufInfo);
1850 prevOffset = allocInfo.offset;
1851 }
1852
1853 // Destroy a few buffers from the top of the stack.
1854 for(size_t i = 0; i < maxBufCount / 5; ++i)
1855 {
1856 const BufferInfo& currBufInfo = bufInfo.back();
1857 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1858 bufInfo.pop_back();
1859 }
1860
1861 // Create some more buffers.
1862 for(size_t i = 0; i < maxBufCount / 5; ++i)
1863 {
1864 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1865 BufferInfo newBufInfo;
1866 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1867 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001868 TEST(res == VK_SUCCESS);
1869 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001870 bufInfo.push_back(newBufInfo);
1871 prevOffset = allocInfo.offset;
1872 }
1873
1874 // Destroy the buffers in reverse order.
1875 while(!bufInfo.empty())
1876 {
1877 const BufferInfo& currBufInfo = bufInfo.back();
1878 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1879 bufInfo.pop_back();
1880 }
1881 }
1882
Adam Sawickiee082772018-06-20 17:45:49 +02001883 // Test ring buffer.
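    // With the linear algorithm, freeing from the front while allocating at the back
    // effectively turns the single block into a ring buffer.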
1884 {
1885 // Allocate a number of buffers that surely fit into this block.
1886 bufCreateInfo.size = bufSizeMax;
1887 for(size_t i = 0; i < maxBufCount; ++i)
1888 {
1889 BufferInfo newBufInfo;
1890 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1891 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001892 TEST(res == VK_SUCCESS);
1893 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02001894 bufInfo.push_back(newBufInfo);
1895 prevOffset = allocInfo.offset;
1896 }
1897
1898 // Free and allocate new buffers enough times to make sure we wrap around at least once.
1899 const size_t buffersPerIter = maxBufCount / 10 - 1;
1900 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
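        // iterCount is chosen so that the total size allocated across all iterations is about
        // twice the block size, which guarantees the ring buffer wraps around at least once.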
1901 for(size_t iter = 0; iter < iterCount; ++iter)
1902 {
1903 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
1904 {
1905 const BufferInfo& currBufInfo = bufInfo.front();
1906 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1907 bufInfo.erase(bufInfo.begin());
1908 }
1909 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
1910 {
1911 BufferInfo newBufInfo;
1912 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1913 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001914 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02001915 bufInfo.push_back(newBufInfo);
1916 }
1917 }
1918
1919 // Allocate buffers until we reach out-of-memory.
1920 uint32_t debugIndex = 0;
1921 while(res == VK_SUCCESS)
1922 {
1923 BufferInfo newBufInfo;
1924 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1925 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
1926 if(res == VK_SUCCESS)
1927 {
1928 bufInfo.push_back(newBufInfo);
1929 }
1930 else
1931 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001932 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02001933 }
1934 ++debugIndex;
1935 }
1936
1937 // Destroy the buffers in random order.
1938 while(!bufInfo.empty())
1939 {
1940 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
1941 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
1942 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1943 bufInfo.erase(bufInfo.begin() + indexToDestroy);
1944 }
1945 }
1946
Adam Sawicki680b2252018-08-22 14:47:32 +02001947 // Test double stack.
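    // Double stack: default allocations grow upward from the bottom of the block, while
    // allocations with VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT grow downward from the top.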
1948 {
1949 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
1950 VkDeviceSize prevOffsetLower = 0;
1951 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
1952 for(size_t i = 0; i < maxBufCount; ++i)
1953 {
1954 const bool upperAddress = (i % 2) != 0;
1955 if(upperAddress)
1956 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
1957 else
1958 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
1959 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1960 BufferInfo newBufInfo;
1961 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1962 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001963 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02001964 if(upperAddress)
1965 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001966 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02001967 prevOffsetUpper = allocInfo.offset;
1968 }
1969 else
1970 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02001972 prevOffsetLower = allocInfo.offset;
1973 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001974 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02001975 bufInfo.push_back(newBufInfo);
1976 }
1977
1978 // Destroy a few buffers from the top of the stack.
1979 for(size_t i = 0; i < maxBufCount / 5; ++i)
1980 {
1981 const BufferInfo& currBufInfo = bufInfo.back();
1982 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1983 bufInfo.pop_back();
1984 }
1985
1986 // Create some more buffers.
1987 for(size_t i = 0; i < maxBufCount / 5; ++i)
1988 {
1989 const bool upperAddress = (i % 2) != 0;
1990 if(upperAddress)
1991 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
1992 else
1993 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
1994 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1995 BufferInfo newBufInfo;
1996 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1997 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001998 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02001999 bufInfo.push_back(newBufInfo);
2000 }
2001
2002 // Destroy the buffers in reverse order.
2003 while(!bufInfo.empty())
2004 {
2005 const BufferInfo& currBufInfo = bufInfo.back();
2006 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2007 bufInfo.pop_back();
2008 }
2009
2010 // Create buffers on both sides until we run out of memory.
2011 prevOffsetLower = 0;
2012 prevOffsetUpper = poolCreateInfo.blockSize;
2013 res = VK_SUCCESS;
2014 for(size_t i = 0; res == VK_SUCCESS; ++i)
2015 {
2016 const bool upperAddress = (i % 2) != 0;
2017 if(upperAddress)
2018 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2019 else
2020 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2021 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2022 BufferInfo newBufInfo;
2023 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2024 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2025 if(res == VK_SUCCESS)
2026 {
2027 if(upperAddress)
2028 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002029 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002030 prevOffsetUpper = allocInfo.offset;
2031 }
2032 else
2033 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002034 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002035 prevOffsetLower = allocInfo.offset;
2036 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002037 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002038 bufInfo.push_back(newBufInfo);
2039 }
2040 }
2041
2042 // Destroy the buffers in random order.
2043 while(!bufInfo.empty())
2044 {
2045 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2046 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2047 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2048 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2049 }
2050
2051 // Create buffers on the upper side only, of constant size, until we run out of memory.
2052 prevOffsetUpper = poolCreateInfo.blockSize;
2053 res = VK_SUCCESS;
2054 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2055 bufCreateInfo.size = bufSizeMax;
2056 for(size_t i = 0; res == VK_SUCCESS; ++i)
2057 {
2058 BufferInfo newBufInfo;
2059 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2060 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2061 if(res == VK_SUCCESS)
2062 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002063 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002064 prevOffsetUpper = allocInfo.offset;
2065 bufInfo.push_back(newBufInfo);
2066 }
2067 }
2068
2069 // Destroy the buffers in reverse order.
2070 while(!bufInfo.empty())
2071 {
2072 const BufferInfo& currBufInfo = bufInfo.back();
2073 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2074 bufInfo.pop_back();
2075 }
2076 }
2077
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002078 // Test ring buffer with lost allocations.
2079 {
2080 // Allocate buffers until the pool is full.
2081 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
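        // Allocations created with CAN_BECOME_LOST may later be taken over by allocations made with
        // CAN_MAKE_OTHER_LOST, once they have not been used for more than frameInUseCount frames.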
2082 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2083 res = VK_SUCCESS;
2084 for(size_t i = 0; res == VK_SUCCESS; ++i)
2085 {
2086 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2087
2088 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2089
2090 BufferInfo newBufInfo;
2091 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2092 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2093 if(res == VK_SUCCESS)
2094 bufInfo.push_back(newBufInfo);
2095 }
2096
2097 // Free first half of it.
2098 {
2099 const size_t buffersToDelete = bufInfo.size() / 2;
2100 for(size_t i = 0; i < buffersToDelete; ++i)
2101 {
2102 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2103 }
2104 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2105 }
2106
2107 // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002108 // This way we make sure the ring buffer wraps around, with its front now in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002109 res = VK_SUCCESS;
2110 for(size_t i = 0; res == VK_SUCCESS; ++i)
2111 {
2112 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2113
2114 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2115
2116 BufferInfo newBufInfo;
2117 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2118 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2119 if(res == VK_SUCCESS)
2120 bufInfo.push_back(newBufInfo);
2121 }
2122
2123 VkDeviceSize firstNewOffset;
2124 {
2125 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2126
2127 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
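            // The pool is completely full, so this can only succeed by making some of the
            // oldest CAN_BECOME_LOST allocations lost.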
2128 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2129 bufCreateInfo.size = bufSizeMax;
2130
2131 BufferInfo newBufInfo;
2132 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2133 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002134 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002135 bufInfo.push_back(newBufInfo);
2136 firstNewOffset = allocInfo.offset;
2137
2138 // Make sure at least one buffer from the beginning became lost.
2139 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002140 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002141 }
2142
2143 // Allocate more buffers with CAN_MAKE_OTHER_LOST until these new allocations wrap around.
2144 size_t newCount = 1;
2145 for(;;)
2146 {
2147 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2148
2149 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2150
2151 BufferInfo newBufInfo;
2152 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2153 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002154 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002155 bufInfo.push_back(newBufInfo);
2156 ++newCount;
2157 if(allocInfo.offset < firstNewOffset)
2158 break;
2159 }
2160
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002161 // Delete buffers that are lost.
2162 for(size_t i = bufInfo.size(); i--; )
2163 {
2164 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2165 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2166 {
2167 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2168 bufInfo.erase(bufInfo.begin() + i);
2169 }
2170 }
2171
2172 // Test vmaMakePoolAllocationsLost
2173 {
2174 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2175
2176 size_t lostAllocCount = SIZE_MAX;
2177 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002178 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002179
2180 size_t realLostAllocCount = 0;
2181 for(size_t i = 0; i < bufInfo.size(); ++i)
2182 {
2183 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2184 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2185 ++realLostAllocCount;
2186 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002187 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002188 }
2189
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002190 // Destroy all the buffers in forward order.
2191 for(size_t i = 0; i < bufInfo.size(); ++i)
2192 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2193 bufInfo.clear();
2194 }
2195
Adam Sawicki70a683e2018-08-24 15:36:32 +02002196 vmaDestroyPool(g_hAllocator, pool);
2197}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002198
Adam Sawicki70a683e2018-08-24 15:36:32 +02002199static void TestLinearAllocatorMultiBlock()
2200{
2201 wprintf(L"Test linear allocator multi block\n");
2202
2203 RandomNumberGenerator rand{345673};
2204
2205 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2206 sampleBufCreateInfo.size = 1024 * 1024;
2207 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2208
2209 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2210 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2211
2212 VmaPoolCreateInfo poolCreateInfo = {};
2213 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2214 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002215 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002216
2217 VmaPool pool = nullptr;
2218 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002219 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002220
2221 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2222
2223 VmaAllocationCreateInfo allocCreateInfo = {};
2224 allocCreateInfo.pool = pool;
2225
2226 std::vector<BufferInfo> bufInfo;
2227 VmaAllocationInfo allocInfo;
2228
2229 // Test one-time free.
2230 {
2231 // Allocate buffers until we move to a second block.
2232 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2233 for(uint32_t i = 0; ; ++i)
2234 {
2235 BufferInfo newBufInfo;
2236 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2237 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002238 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002239 bufInfo.push_back(newBufInfo);
2240 if(lastMem && allocInfo.deviceMemory != lastMem)
2241 {
2242 break;
2243 }
2244 lastMem = allocInfo.deviceMemory;
2245 }
2246
Adam Sawickib8d34d52018-10-03 17:41:20 +02002247 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002248
2249 // Make sure that the pool now has two blocks.
2250 VmaPoolStats poolStats = {};
2251 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002252 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002253
2254 // Destroy all the buffers in random order.
2255 while(!bufInfo.empty())
2256 {
2257 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2258 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2259 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2260 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2261 }
2262
2263 // Make sure that the pool now has at most one block.
2264 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002265 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002266 }
2267
2268 // Test stack.
2269 {
2270 // Allocate buffers until we move to a second block.
2271 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2272 for(uint32_t i = 0; ; ++i)
2273 {
2274 BufferInfo newBufInfo;
2275 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2276 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002277 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002278 bufInfo.push_back(newBufInfo);
2279 if(lastMem && allocInfo.deviceMemory != lastMem)
2280 {
2281 break;
2282 }
2283 lastMem = allocInfo.deviceMemory;
2284 }
2285
Adam Sawickib8d34d52018-10-03 17:41:20 +02002286 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002287
2288 // Add a few more buffers.
2289 for(uint32_t i = 0; i < 5; ++i)
2290 {
2291 BufferInfo newBufInfo;
2292 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2293 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002294 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002295 bufInfo.push_back(newBufInfo);
2296 }
2297
2298 // Make sure that the pool now has two blocks.
2299 VmaPoolStats poolStats = {};
2300 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002301 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002302
2303 // Delete half of the buffers, LIFO.
2304 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2305 {
2306 const BufferInfo& currBufInfo = bufInfo.back();
2307 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2308 bufInfo.pop_back();
2309 }
2310
2311 // Add one more buffer.
2312 BufferInfo newBufInfo;
2313 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2314 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002315 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002316 bufInfo.push_back(newBufInfo);
2317
2318 // Make sure that the pool now has one block.
2319 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002320 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002321
2322 // Delete all the remaining buffers, LIFO.
2323 while(!bufInfo.empty())
2324 {
2325 const BufferInfo& currBufInfo = bufInfo.back();
2326 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2327 bufInfo.pop_back();
2328 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002329 }
2330
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002331 vmaDestroyPool(g_hAllocator, pool);
2332}
2333
Adam Sawickifd11d752018-08-22 15:02:10 +02002334static void ManuallyTestLinearAllocator()
2335{
2336 VmaStats origStats;
2337 vmaCalculateStats(g_hAllocator, &origStats);
2338
2339 wprintf(L"Manually test linear allocator\n");
2340
2341 RandomNumberGenerator rand{645332};
2342
2343 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2344 sampleBufCreateInfo.size = 1024; // Whatever.
2345 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2346
2347 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2348 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2349
2350 VmaPoolCreateInfo poolCreateInfo = {};
2351 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002353
2354 poolCreateInfo.blockSize = 10 * 1024;
2355 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2356 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2357
2358 VmaPool pool = nullptr;
2359 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002360 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002361
2362 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2363
2364 VmaAllocationCreateInfo allocCreateInfo = {};
2365 allocCreateInfo.pool = pool;
2366
2367 std::vector<BufferInfo> bufInfo;
2368 VmaAllocationInfo allocInfo;
2369 BufferInfo newBufInfo;
2370
2371 // Test double stack.
2372 {
2373 /*
2374 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2375 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2376
2377 Totally:
2378 1 block allocated
2379 10240 Vulkan bytes
2380 6 new allocations
2381 2256 bytes in allocations
2382 */
2383
2384 bufCreateInfo.size = 32;
2385 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2386 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002387 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002388 bufInfo.push_back(newBufInfo);
2389
2390 bufCreateInfo.size = 1024;
2391 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2392 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002393 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002394 bufInfo.push_back(newBufInfo);
2395
2396 bufCreateInfo.size = 32;
2397 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2398 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002399 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002400 bufInfo.push_back(newBufInfo);
2401
2402 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2403
2404 bufCreateInfo.size = 128;
2405 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2406 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002407 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002408 bufInfo.push_back(newBufInfo);
2409
2410 bufCreateInfo.size = 1024;
2411 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2412 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002413 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002414 bufInfo.push_back(newBufInfo);
2415
2416 bufCreateInfo.size = 16;
2417 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2418 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002419 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002420 bufInfo.push_back(newBufInfo);
2421
2422 VmaStats currStats;
2423 vmaCalculateStats(g_hAllocator, &currStats);
2424 VmaPoolStats poolStats;
2425 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2426
2427 char* statsStr = nullptr;
2428 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2429
2430 // PUT BREAKPOINT HERE TO CHECK.
2431 // Inspect: currStats versus origStats, poolStats, statsStr.
2432 int I = 0;
2433
2434 vmaFreeStatsString(g_hAllocator, statsStr);
2435
2436 // Destroy the buffers in reverse order.
2437 while(!bufInfo.empty())
2438 {
2439 const BufferInfo& currBufInfo = bufInfo.back();
2440 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2441 bufInfo.pop_back();
2442 }
2443 }
2444
2445 vmaDestroyPool(g_hAllocator, pool);
2446}
2447
Adam Sawicki80927152018-09-07 17:27:23 +02002448static void BenchmarkAlgorithmsCase(FILE* file,
2449 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002450 bool empty,
2451 VmaAllocationCreateFlags allocStrategy,
2452 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002453{
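    // Measures total allocation and deallocation time for one combination of algorithm
    // (default / linear / buddy), allocation strategy, and free order, optionally starting
    // from a pre-fragmented (non-empty) pool.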
2454 RandomNumberGenerator rand{16223};
2455
2456 const VkDeviceSize bufSizeMin = 32;
2457 const VkDeviceSize bufSizeMax = 1024;
2458 const size_t maxBufCapacity = 10000;
2459 const uint32_t iterationCount = 10;
2460
2461 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2462 sampleBufCreateInfo.size = bufSizeMax;
2463 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2464
2465 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2466 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2467
2468 VmaPoolCreateInfo poolCreateInfo = {};
2469 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002470 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002471
2472 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002473 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002474 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2475
2476 VmaPool pool = nullptr;
2477 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002478 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002479
2480 // Buffer created just to get memory requirements. Never bound to any memory.
2481 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2482 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002483 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002484
2485 VkMemoryRequirements memReq = {};
2486 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2487
2488 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2489
2490 VmaAllocationCreateInfo allocCreateInfo = {};
2491 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002492 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002493
2494 VmaAllocation alloc;
2495 std::vector<VmaAllocation> baseAllocations;
2496
2497 if(!empty)
2498 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002499 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002500 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002501 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002502 {
2503 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2504 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002505 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002506 baseAllocations.push_back(alloc);
2507 totalSize += memReq.size;
2508 }
2509
2510 // Delete half of them, choose randomly.
2511 size_t allocsToDelete = baseAllocations.size() / 2;
2512 for(size_t i = 0; i < allocsToDelete; ++i)
2513 {
2514 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2515 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2516 baseAllocations.erase(baseAllocations.begin() + index);
2517 }
2518 }
2519
2520 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002521 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002522 std::vector<VmaAllocation> testAllocations;
2523 testAllocations.reserve(allocCount);
2524 duration allocTotalDuration = duration::zero();
2525 duration freeTotalDuration = duration::zero();
2526 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2527 {
2528 // Allocations
2529 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2530 for(size_t i = 0; i < allocCount; ++i)
2531 {
2532 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2533 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002534 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002535 testAllocations.push_back(alloc);
2536 }
2537 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2538
2539 // Deallocations
2540 switch(freeOrder)
2541 {
2542 case FREE_ORDER::FORWARD:
2543 // Leave testAllocations unchanged.
2544 break;
2545 case FREE_ORDER::BACKWARD:
2546 std::reverse(testAllocations.begin(), testAllocations.end());
2547 break;
2548 case FREE_ORDER::RANDOM:
2549 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2550 break;
2551 default: assert(0);
2552 }
2553
2554 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2555 for(size_t i = 0; i < allocCount; ++i)
2556 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2557 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2558
2559 testAllocations.clear();
2560 }
2561
2562 // Delete baseAllocations
2563 while(!baseAllocations.empty())
2564 {
2565 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2566 baseAllocations.pop_back();
2567 }
2568
2569 vmaDestroyPool(g_hAllocator, pool);
2570
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002571 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2572 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2573
Adam Sawicki80927152018-09-07 17:27:23 +02002574 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2575 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002576 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002577 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002578 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002579 allocTotalSeconds,
2580 freeTotalSeconds);
2581
2582 if(file)
2583 {
2584 std::string currTime;
2585 CurrentTimeToStr(currTime);
2586
Adam Sawicki80927152018-09-07 17:27:23 +02002587 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002588 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002589 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002590 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002591 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002592 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2593 allocTotalSeconds,
2594 freeTotalSeconds);
2595 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002596}
2597
Adam Sawicki80927152018-09-07 17:27:23 +02002598static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002599{
Adam Sawicki80927152018-09-07 17:27:23 +02002600 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002601
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002602 if(file)
2603 {
2604 fprintf(file,
2605 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002606 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002607 "Allocation time (s),Deallocation time (s)\n");
2608 }
2609
Adam Sawicki0a607132018-08-24 11:18:41 +02002610 uint32_t freeOrderCount = 1;
2611 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2612 freeOrderCount = 3;
2613 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2614 freeOrderCount = 2;
2615
2616 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002617 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002618
2619 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2620 {
2621 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2622 switch(freeOrderIndex)
2623 {
2624 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2625 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2626 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2627 default: assert(0);
2628 }
2629
2630 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2631 {
Adam Sawicki80927152018-09-07 17:27:23 +02002632 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002633 {
Adam Sawicki80927152018-09-07 17:27:23 +02002634 uint32_t algorithm = 0;
2635 switch(algorithmIndex)
2636 {
2637 case 0:
2638 break;
2639 case 1:
2640 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2641 break;
2642 case 2:
2643 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2644 break;
2645 default:
2646 assert(0);
2647 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002648
Adam Sawicki80927152018-09-07 17:27:23 +02002649 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002650 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2651 {
2652 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002653 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002654 {
2655 switch(allocStrategyIndex)
2656 {
2657 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2658 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2659 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2660 default: assert(0);
2661 }
2662 }
2663
Adam Sawicki80927152018-09-07 17:27:23 +02002664 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002665 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002666 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002667 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002668 strategy,
2669 freeOrder); // freeOrder
2670 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002671 }
2672 }
2673 }
2674}
2675
Adam Sawickib8333fb2018-03-13 16:15:53 +01002676static void TestPool_SameSize()
2677{
2678 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2679 const size_t BUF_COUNT = 100;
2680 VkResult res;
2681
2682 RandomNumberGenerator rand{123};
2683
2684 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2685 bufferInfo.size = BUF_SIZE;
2686 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2687
2688 uint32_t memoryTypeBits = UINT32_MAX;
2689 {
2690 VkBuffer dummyBuffer;
2691 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002692 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002693
2694 VkMemoryRequirements memReq;
2695 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2696 memoryTypeBits = memReq.memoryTypeBits;
2697
2698 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2699 }
2700
2701 VmaAllocationCreateInfo poolAllocInfo = {};
2702 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2703 uint32_t memTypeIndex;
2704 res = vmaFindMemoryTypeIndex(
2705 g_hAllocator,
2706 memoryTypeBits,
2707 &poolAllocInfo,
2708 &memTypeIndex);
2709
2710 VmaPoolCreateInfo poolCreateInfo = {};
2711 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2712 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2713 poolCreateInfo.minBlockCount = 1;
2714 poolCreateInfo.maxBlockCount = 4;
2715 poolCreateInfo.frameInUseCount = 0;
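    // 4 blocks of BUF_SIZE * BUF_COUNT / 4 each hold exactly BUF_COUNT buffers in total.
    // frameInUseCount = 0 means an allocation may become lost as soon as the frame index
    // advances past the frame in which it was last used.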
2716
2717 VmaPool pool;
2718 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002719 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002720
2721 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2722
2723 VmaAllocationCreateInfo allocInfo = {};
2724 allocInfo.pool = pool;
2725 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2726 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2727
2728 struct BufItem
2729 {
2730 VkBuffer Buf;
2731 VmaAllocation Alloc;
2732 };
2733 std::vector<BufItem> items;
2734
2735 // Fill entire pool.
2736 for(size_t i = 0; i < BUF_COUNT; ++i)
2737 {
2738 BufItem item;
2739 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002740 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002741 items.push_back(item);
2742 }
2743
2744 // Make sure that another allocation would fail.
2745 {
2746 BufItem item;
2747 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002748 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002749 }
2750
2751 // Validate that no buffer is lost. Also check that they are not mapped.
2752 for(size_t i = 0; i < items.size(); ++i)
2753 {
2754 VmaAllocationInfo allocInfo;
2755 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002756 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2757 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002758 }
2759
2760 // Free some percentage of randomly chosen items.
2761 {
2762 const size_t PERCENT_TO_FREE = 10;
2763 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2764 for(size_t i = 0; i < itemsToFree; ++i)
2765 {
2766 size_t index = (size_t)rand.Generate() % items.size();
2767 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2768 items.erase(items.begin() + index);
2769 }
2770 }
2771
2772 // Randomly allocate and free items.
2773 {
2774 const size_t OPERATION_COUNT = BUF_COUNT;
2775 for(size_t i = 0; i < OPERATION_COUNT; ++i)
2776 {
2777 bool allocate = rand.Generate() % 2 != 0;
2778 if(allocate)
2779 {
2780 if(items.size() < BUF_COUNT)
2781 {
2782 BufItem item;
2783 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002784 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002785 items.push_back(item);
2786 }
2787 }
2788 else // Free
2789 {
2790 if(!items.empty())
2791 {
2792 size_t index = (size_t)rand.Generate() % items.size();
2793 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2794 items.erase(items.begin() + index);
2795 }
2796 }
2797 }
2798 }
2799
2800 // Allocate up to maximum.
2801 while(items.size() < BUF_COUNT)
2802 {
2803 BufItem item;
2804 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002805 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002806 items.push_back(item);
2807 }
2808
2809 // Validate that no buffer is lost.
2810 for(size_t i = 0; i < items.size(); ++i)
2811 {
2812 VmaAllocationInfo allocInfo;
2813 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002814 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002815 }
2816
2817 // Next frame.
2818 vmaSetCurrentFrameIndex(g_hAllocator, 2);
2819
2820 // Allocate another BUF_COUNT buffers.
2821 for(size_t i = 0; i < BUF_COUNT; ++i)
2822 {
2823 BufItem item;
2824 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002825 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002826 items.push_back(item);
2827 }
2828
2829 // Make sure the first BUF_COUNT buffers are lost. Delete them.
2830 for(size_t i = 0; i < BUF_COUNT; ++i)
2831 {
2832 VmaAllocationInfo allocInfo;
2833 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002834 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002835 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2836 }
2837 items.erase(items.begin(), items.begin() + BUF_COUNT);
2838
2839 // Validate that no buffer is lost.
2840 for(size_t i = 0; i < items.size(); ++i)
2841 {
2842 VmaAllocationInfo allocInfo;
2843 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002844 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002845 }
2846
2847 // Free one item.
2848 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
2849 items.pop_back();
2850
2851 // Validate statistics.
2852 {
2853 VmaPoolStats poolStats = {};
2854 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002855 TEST(poolStats.allocationCount == items.size());
2856 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
2857 TEST(poolStats.unusedRangeCount == 1);
2858 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
2859 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002860 }
2861
2862 // Free all remaining items.
2863 for(size_t i = items.size(); i--; )
2864 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2865 items.clear();
2866
2867 // Allocate maximum items again.
2868 for(size_t i = 0; i < BUF_COUNT; ++i)
2869 {
2870 BufItem item;
2871 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002872 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002873 items.push_back(item);
2874 }
2875
2876 // Delete every other item.
2877 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
2878 {
2879 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2880 items.erase(items.begin() + i);
2881 }
2882
2883 // Defragment!
2884 {
2885 std::vector<VmaAllocation> allocationsToDefragment(items.size());
2886 for(size_t i = 0; i < items.size(); ++i)
2887 allocationsToDefragment[i] = items[i].Alloc;
2888
2889 VmaDefragmentationStats defragmentationStats;
2890 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002891 TEST(res == VK_SUCCESS);
2892 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002893 }
2894
2895 // Free all remaining items.
2896 for(size_t i = items.size(); i--; )
2897 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2898 items.clear();
2899
2900 ////////////////////////////////////////////////////////////////////////////////
2901 // Test for vmaMakePoolAllocationsLost
2902
2903 // Allocate 4 buffers on frame 10.
2904 vmaSetCurrentFrameIndex(g_hAllocator, 10);
2905 for(size_t i = 0; i < 4; ++i)
2906 {
2907 BufItem item;
2908 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002909 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002910 items.push_back(item);
2911 }
2912
2913 // Touch the first 2 of them on frame 11.
2914 vmaSetCurrentFrameIndex(g_hAllocator, 11);
2915 for(size_t i = 0; i < 2; ++i)
2916 {
2917 VmaAllocationInfo allocInfo;
2918 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
2919 }
2920
2921 // vmaMakePoolAllocationsLost. Only the remaining 2 should be lost.
2922 size_t lostCount = 0xDEADC0DE;
2923 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002924 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002925
2926 // Make another call. Now 0 should be lost.
2927 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002928 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002929
2930 // Make another call, with null count. Should not crash.
2931 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
2932
2933 // END: Free all remaining items.
2934 for(size_t i = items.size(); i--; )
2935 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2936
2937 items.clear();
2938
Adam Sawickid2924172018-06-11 12:48:46 +02002939 ////////////////////////////////////////////////////////////////////////////////
2940 // Test for allocation too large for pool
2941
2942 {
2943 VmaAllocationCreateInfo allocCreateInfo = {};
2944 allocCreateInfo.pool = pool;
2945
2946 VkMemoryRequirements memReq;
2947 memReq.memoryTypeBits = UINT32_MAX;
2948 memReq.alignment = 1;
2949 memReq.size = poolCreateInfo.blockSize + 4;
2950
2951 VmaAllocation alloc = nullptr;
2952 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002953 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02002954 }
2955
Adam Sawickib8333fb2018-03-13 16:15:53 +01002956 vmaDestroyPool(g_hAllocator, pool);
2957}
2958
Adam Sawickie44c6262018-06-15 14:30:39 +02002959static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
2960{
2961 const uint8_t* pBytes = (const uint8_t*)pMemory;
2962 for(size_t i = 0; i < size; ++i)
2963 {
2964 if(pBytes[i] != pattern)
2965 {
2966 return false;
2967 }
2968 }
2969 return true;
2970}
2971
2972static void TestAllocationsInitialization()
2973{
2974 VkResult res;
2975
2976 const size_t BUF_SIZE = 1024;
2977
2978 // Create pool.
2979
2980 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2981 bufInfo.size = BUF_SIZE;
2982 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2983
2984 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
2985 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2986
2987 VmaPoolCreateInfo poolCreateInfo = {};
2988 poolCreateInfo.blockSize = BUF_SIZE * 10;
2989 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
2990 poolCreateInfo.maxBlockCount = 1;
2991 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002992 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02002993
2994 VmaAllocationCreateInfo bufAllocCreateInfo = {};
2995 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002996 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02002997
2998 // Create one persistently mapped buffer to keep memory of this block mapped,
2999 // so that pointer to mapped data will remain (more or less...) valid even
3000 // after destruction of other allocations.
3001
3002 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3003 VkBuffer firstBuf;
3004 VmaAllocation firstAlloc;
3005 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003006 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003007
3008 // Test buffers.
3009
3010 for(uint32_t i = 0; i < 2; ++i)
3011 {
3012 const bool persistentlyMapped = i == 0;
3013 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3014 VkBuffer buf;
3015 VmaAllocation alloc;
3016 VmaAllocationInfo allocInfo;
3017 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003018 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003019
3020 void* pMappedData;
3021 if(!persistentlyMapped)
3022 {
3023 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003024 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003025 }
3026 else
3027 {
3028 pMappedData = allocInfo.pMappedData;
3029 }
3030
3031 // Validate initialized content
3032 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003033 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003034
3035 if(!persistentlyMapped)
3036 {
3037 vmaUnmapMemory(g_hAllocator, alloc);
3038 }
3039
3040 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3041
3042 // Validate freed content
3043 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003044 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003045 }
3046
3047 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3048 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3049}
3050
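// Multi-threaded benchmark of a single custom VmaPool: every worker thread owns a set of
// buffer/image items, turns over a configurable percentage of them each frame, recreates
// allocations that became lost, and records timing statistics that are merged into outResult.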
Adam Sawickib8333fb2018-03-13 16:15:53 +01003051static void TestPool_Benchmark(
3052 PoolTestResult& outResult,
3053 const PoolTestConfig& config)
3054{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003055 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003056
3057 RandomNumberGenerator mainRand{config.RandSeed};
3058
3059 uint32_t allocationSizeProbabilitySum = std::accumulate(
3060 config.AllocationSizes.begin(),
3061 config.AllocationSizes.end(),
3062 0u,
3063 [](uint32_t sum, const AllocationSize& allocSize) {
3064 return sum + allocSize.Probability;
3065 });
3066
3067 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3068 bufferInfo.size = 256; // Whatever.
3069 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3070
3071 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3072 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3073 imageInfo.extent.width = 256; // Whatever.
3074 imageInfo.extent.height = 256; // Whatever.
3075 imageInfo.extent.depth = 1;
3076 imageInfo.mipLevels = 1;
3077 imageInfo.arrayLayers = 1;
3078 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3079 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3080 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3081 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3082 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3083
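    // Create temporary dummy resources only to query their memory requirements - the pool must
    // use a memory type whose bit is set in memoryTypeBits of every resource kind being tested.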
3084 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3085 {
3086 VkBuffer dummyBuffer;
3087 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003088 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003089
3090 VkMemoryRequirements memReq;
3091 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3092 bufferMemoryTypeBits = memReq.memoryTypeBits;
3093
3094 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3095 }
3096
3097 uint32_t imageMemoryTypeBits = UINT32_MAX;
3098 {
3099 VkImage dummyImage;
3100 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003101 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003102
3103 VkMemoryRequirements memReq;
3104 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3105 imageMemoryTypeBits = memReq.memoryTypeBits;
3106
3107 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3108 }
3109
3110 uint32_t memoryTypeBits = 0;
3111 if(config.UsesBuffers() && config.UsesImages())
3112 {
3113 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3114 if(memoryTypeBits == 0)
3115 {
3116 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3117 return;
3118 }
3119 }
3120 else if(config.UsesBuffers())
3121 memoryTypeBits = bufferMemoryTypeBits;
3122 else if(config.UsesImages())
3123 memoryTypeBits = imageMemoryTypeBits;
3124 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003125 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003126
3127 VmaPoolCreateInfo poolCreateInfo = {};
3128 poolCreateInfo.memoryTypeIndex = 0;
3129 poolCreateInfo.minBlockCount = 1;
3130 poolCreateInfo.maxBlockCount = 1;
3131 poolCreateInfo.blockSize = config.PoolSize;
3132 poolCreateInfo.frameInUseCount = 1;
3133
3134 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3135 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3136 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3137
3138 VmaPool pool;
3139 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003141
3142 // Start time measurement - after creating pool and initializing data structures.
3143 time_point timeBeg = std::chrono::high_resolution_clock::now();
3144
3145 ////////////////////////////////////////////////////////////////////////////////
3146 // ThreadProc
3147 auto ThreadProc = [&](
3148 PoolTestThreadResult* outThreadResult,
3149 uint32_t randSeed,
3150 HANDLE frameStartEvent,
3151 HANDLE frameEndEvent) -> void
3152 {
3153 RandomNumberGenerator threadRand{randSeed};
3154
3155 outThreadResult->AllocationTimeMin = duration::max();
3156 outThreadResult->AllocationTimeSum = duration::zero();
3157 outThreadResult->AllocationTimeMax = duration::min();
3158 outThreadResult->DeallocationTimeMin = duration::max();
3159 outThreadResult->DeallocationTimeSum = duration::zero();
3160 outThreadResult->DeallocationTimeMax = duration::min();
3161 outThreadResult->AllocationCount = 0;
3162 outThreadResult->DeallocationCount = 0;
3163 outThreadResult->LostAllocationCount = 0;
3164 outThreadResult->LostAllocationTotalSize = 0;
3165 outThreadResult->FailedAllocationCount = 0;
3166 outThreadResult->FailedAllocationTotalSize = 0;
3167
3168 struct Item
3169 {
3170 VkDeviceSize BufferSize;
3171 VkExtent2D ImageSize;
3172 VkBuffer Buf;
3173 VkImage Image;
3174 VmaAllocation Alloc;
3175
3176 VkDeviceSize CalcSizeBytes() const
3177 {
3178 return BufferSize +
3179 ImageSize.width * ImageSize.height * 4;
3180 }
3181 };
3182 std::vector<Item> unusedItems, usedItems;
3183
3184 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3185
3186 // Create all items - all unused, not yet allocated.
3187 for(size_t i = 0; i < threadTotalItemCount; ++i)
3188 {
3189 Item item = {};
3190
3191 uint32_t allocSizeIndex = 0;
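            // Pick an AllocationSize entry at random, weighted by its Probability field.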
3192 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3193 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3194 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3195
3196 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3197 if(allocSize.BufferSizeMax > 0)
3198 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003199 TEST(allocSize.BufferSizeMin > 0);
3200 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003201 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3202 item.BufferSize = allocSize.BufferSizeMin;
3203 else
3204 {
3205 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3206 item.BufferSize = item.BufferSize / 16 * 16;
3207 }
3208 }
3209 else
3210 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003211 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003212 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3213 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3214 else
3215 {
3216 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3217 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3218 }
3219 }
3220
3221 unusedItems.push_back(item);
3222 }
3223
3224 auto Allocate = [&](Item& item) -> VkResult
3225 {
3226 VmaAllocationCreateInfo allocCreateInfo = {};
3227 allocCreateInfo.pool = pool;
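            // Allow the allocation to become lost under memory pressure and to make other
            // lost-enabled allocations lost - the behavior this benchmark exercises.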
3228 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3229 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3230
3231 if(item.BufferSize)
3232 {
3233 bufferInfo.size = item.BufferSize;
3234 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3235 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3236 }
3237 else
3238 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003239 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003240
3241 imageInfo.extent.width = item.ImageSize.width;
3242 imageInfo.extent.height = item.ImageSize.height;
3243 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3244 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3245 }
3246 };
3247
3248 ////////////////////////////////////////////////////////////////////////////////
3249 // Frames
3250 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3251 {
3252 WaitForSingleObject(frameStartEvent, INFINITE);
3253
3254 // Always move some percentage of used items back to unused, so a different subset gets used each frame.
3255 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3256 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3257 {
3258 size_t index = threadRand.Generate() % usedItems.size();
3259 unusedItems.push_back(usedItems[index]);
3260 usedItems.erase(usedItems.begin() + index);
3261 }
3262
3263 // Determine which bufs we want to use in this frame.
3264 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3265 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003266 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003267 // Move some used to unused.
3268 while(usedBufCount < usedItems.size())
3269 {
3270 size_t index = threadRand.Generate() % usedItems.size();
3271 unusedItems.push_back(usedItems[index]);
3272 usedItems.erase(usedItems.begin() + index);
3273 }
3274 // Move some unused to used.
3275 while(usedBufCount > usedItems.size())
3276 {
3277 size_t index = threadRand.Generate() % unusedItems.size();
3278 usedItems.push_back(unusedItems[index]);
3279 unusedItems.erase(unusedItems.begin() + index);
3280 }
3281
3282 uint32_t touchExistingCount = 0;
3283 uint32_t touchLostCount = 0;
3284 uint32_t createSucceededCount = 0;
3285 uint32_t createFailedCount = 0;
3286
3287 // Touch all used items. Allocate those not yet created; recreate those that became lost.
3288 for(size_t i = 0; i < usedItems.size(); ++i)
3289 {
3290 Item& item = usedItems[i];
3291 // Not yet created.
3292 if(item.Alloc == VK_NULL_HANDLE)
3293 {
3294 res = Allocate(item);
3295 ++outThreadResult->AllocationCount;
3296 if(res != VK_SUCCESS)
3297 {
3298 item.Alloc = VK_NULL_HANDLE;
3299 item.Buf = VK_NULL_HANDLE;
3300 ++outThreadResult->FailedAllocationCount;
3301 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3302 ++createFailedCount;
3303 }
3304 else
3305 ++createSucceededCount;
3306 }
3307 else
3308 {
3309 // Touch.
3310 VmaAllocationInfo allocInfo;
3311 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3312 // Became lost: for a lost allocation vmaGetAllocationInfo reports VK_NULL_HANDLE as deviceMemory.
3313 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3314 {
3315 ++touchLostCount;
3316
3317 // Destroy.
3318 {
3319 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3320 if(item.Buf)
3321 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3322 else
3323 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3324 ++outThreadResult->DeallocationCount;
3325 }
3326 item.Alloc = VK_NULL_HANDLE;
3327 item.Buf = VK_NULL_HANDLE;
3328
3329 ++outThreadResult->LostAllocationCount;
3330 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3331
3332 // Recreate.
3333 res = Allocate(item);
3334 ++outThreadResult->AllocationCount;
3335 // Creation failed.
3336 if(res != VK_SUCCESS)
3337 {
3338 ++outThreadResult->FailedAllocationCount;
3339 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3340 ++createFailedCount;
3341 }
3342 else
3343 ++createSucceededCount;
3344 }
3345 else
3346 ++touchExistingCount;
3347 }
3348 }
3349
3350 /*
3351 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3352 randSeed, frameIndex,
3353 touchExistingCount, touchLostCount,
3354 createSucceededCount, createFailedCount);
3355 */
3356
3357 SetEvent(frameEndEvent);
3358 }
3359
3360 // Free all remaining items.
3361 for(size_t i = usedItems.size(); i--; )
3362 {
3363 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3364 if(usedItems[i].Buf)
3365 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3366 else
3367 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3368 ++outThreadResult->DeallocationCount;
3369 }
3370 for(size_t i = unusedItems.size(); i--; )
3371 {
3372 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3373 if(unusedItems[i].Buf)
3374 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3375 else
3376 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3377 ++outThreadResult->DeallocationCount;
3378 }
3379 };
3380
3381 // Launch threads.
3382 uint32_t threadRandSeed = mainRand.Generate();
3383 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3384 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3385 std::vector<std::thread> bkgThreads;
3386 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3387 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3388 {
3389 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3390 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3391 bkgThreads.emplace_back(std::bind(
3392 ThreadProc,
3393 &threadResults[threadIndex],
3394 threadRandSeed + threadIndex,
3395 frameStartEvents[threadIndex],
3396 frameEndEvents[threadIndex]));
3397 }
3398
3399 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003400 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003401 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3402 {
3403 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3404 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3405 SetEvent(frameStartEvents[threadIndex]);
3406 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3407 }
3408
3409 // Wait for threads to finish.
3410 for(size_t i = 0; i < bkgThreads.size(); ++i)
3411 {
3412 bkgThreads[i].join();
3413 CloseHandle(frameEndEvents[i]);
3414 CloseHandle(frameStartEvents[i]);
3415 }
3416 bkgThreads.clear();
3417
3418 // Finish time measurement - before destroying pool.
3419 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3420
3421 vmaDestroyPool(g_hAllocator, pool);
3422
3423 outResult.AllocationTimeMin = duration::max();
3424 outResult.AllocationTimeAvg = duration::zero();
3425 outResult.AllocationTimeMax = duration::min();
3426 outResult.DeallocationTimeMin = duration::max();
3427 outResult.DeallocationTimeAvg = duration::zero();
3428 outResult.DeallocationTimeMax = duration::min();
3429 outResult.LostAllocationCount = 0;
3430 outResult.LostAllocationTotalSize = 0;
3431 outResult.FailedAllocationCount = 0;
3432 outResult.FailedAllocationTotalSize = 0;
3433 size_t allocationCount = 0;
3434 size_t deallocationCount = 0;
3435 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3436 {
3437 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3438 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3439 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3440 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3441 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3442 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3443 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3444 allocationCount += threadResult.AllocationCount;
3445 deallocationCount += threadResult.DeallocationCount;
3446 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3447 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3448 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3449 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3450 }
3451 if(allocationCount)
3452 outResult.AllocationTimeAvg /= allocationCount;
3453 if(deallocationCount)
3454 outResult.DeallocationTimeAvg /= deallocationCount;
3455}
3456
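// Returns true if the half-open ranges [ptr1, ptr1+size1) and [ptr2, ptr2+size2) intersect.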
3457static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3458{
3459 if(ptr1 < ptr2)
3460 return ptr1 + size1 > ptr2;
3461 else if(ptr2 < ptr1)
3462 return ptr2 + size2 > ptr1;
3463 else
3464 return true;
3465}
3466
3467static void TestMapping()
3468{
3469 wprintf(L"Testing mapping...\n");
3470
3471 VkResult res;
3472 uint32_t memTypeIndex = UINT32_MAX;
3473
3474 enum TEST
3475 {
3476 TEST_NORMAL,
3477 TEST_POOL,
3478 TEST_DEDICATED,
3479 TEST_COUNT
3480 };
3481 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3482 {
3483 VmaPool pool = nullptr;
3484 if(testIndex == TEST_POOL)
3485 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003486 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003487 VmaPoolCreateInfo poolInfo = {};
3488 poolInfo.memoryTypeIndex = memTypeIndex;
3489 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003490 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003491 }
3492
3493 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3494 bufInfo.size = 0x10000;
3495 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3496
3497 VmaAllocationCreateInfo allocCreateInfo = {};
3498 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3499 allocCreateInfo.pool = pool;
3500 if(testIndex == TEST_DEDICATED)
3501 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3502
3503 VmaAllocationInfo allocInfo;
3504
3505 // Mapped manually
3506
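        // vmaMapMemory/vmaUnmapMemory keep an internal reference count per allocation, so an
        // allocation mapped twice must also be unmapped twice before pMappedData becomes null.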
3507 // Create 2 buffers (element [2] is filled later with a persistently mapped buffer).
3508 BufferInfo bufferInfos[3];
3509 for(size_t i = 0; i < 2; ++i)
3510 {
3511 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3512 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003513 TEST(res == VK_SUCCESS);
3514 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003515 memTypeIndex = allocInfo.memoryType;
3516 }
3517
3518 // Map buffer 0.
3519 char* data00 = nullptr;
3520 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003521 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003522 data00[0xFFFF] = data00[0];
3523
3524 // Map buffer 0 second time.
3525 char* data01 = nullptr;
3526 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003527 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003528
3529 // Map buffer 1.
3530 char* data1 = nullptr;
3531 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003532 TEST(res == VK_SUCCESS && data1 != nullptr);
3533 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003534 data1[0xFFFF] = data1[0];
3535
3536 // Unmap buffer 0 two times.
3537 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3538 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3539 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003540 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003541
3542 // Unmap buffer 1.
3543 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3544 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003545 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003546
3547 // Create 3rd buffer - persistently mapped.
3548 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3549 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3550 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003551 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003552
3553 // Map buffer 2.
3554 char* data2 = nullptr;
3555 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003556 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003557 data2[0xFFFF] = data2[0];
3558
3559 // Unmap buffer 2.
3560 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3561 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003562 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003563
3564 // Destroy all buffers.
3565 for(size_t i = 3; i--; )
3566 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3567
3568 vmaDestroyPool(g_hAllocator, pool);
3569 }
3570}
3571
3572static void TestMappingMultithreaded()
3573{
3574 wprintf(L"Testing mapping multithreaded...\n");
3575
3576 static const uint32_t threadCount = 16;
3577 static const uint32_t bufferCount = 1024;
3578 static const uint32_t threadBufferCount = bufferCount / threadCount;
3579
3580 VkResult res;
3581 volatile uint32_t memTypeIndex = UINT32_MAX;
3582
3583 enum TEST
3584 {
3585 TEST_NORMAL,
3586 TEST_POOL,
3587 TEST_DEDICATED,
3588 TEST_COUNT
3589 };
3590 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3591 {
3592 VmaPool pool = nullptr;
3593 if(testIndex == TEST_POOL)
3594 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003595 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003596 VmaPoolCreateInfo poolInfo = {};
3597 poolInfo.memoryTypeIndex = memTypeIndex;
3598 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003599 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003600 }
3601
3602 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3603 bufCreateInfo.size = 0x10000;
3604 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3605
3606 VmaAllocationCreateInfo allocCreateInfo = {};
3607 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3608 allocCreateInfo.pool = pool;
3609 if(testIndex == TEST_DEDICATED)
3610 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3611
3612 std::thread threads[threadCount];
3613 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3614 {
3615 threads[threadIndex] = std::thread([=, &memTypeIndex](){
3616 // ======== THREAD FUNCTION ========
3617
3618 RandomNumberGenerator rand{threadIndex};
3619
3620 enum class MODE
3621 {
3622 // Don't map this buffer at all.
3623 DONT_MAP,
3624 // Map and quickly unmap.
3625 MAP_FOR_MOMENT,
3626 // Map and unmap before destruction.
3627 MAP_FOR_LONGER,
3628 // Map two times. Quickly unmap, second unmap before destruction.
3629 MAP_TWO_TIMES,
3630 // Create this buffer as persistently mapped.
3631 PERSISTENTLY_MAPPED,
3632 COUNT
3633 };
3634 std::vector<BufferInfo> bufInfos{threadBufferCount};
3635 std::vector<MODE> bufModes{threadBufferCount};
3636
3637 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
3638 {
3639 BufferInfo& bufInfo = bufInfos[bufferIndex];
3640 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
3641 bufModes[bufferIndex] = mode;
3642
3643 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
3644 if(mode == MODE::PERSISTENTLY_MAPPED)
3645 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3646
3647 VmaAllocationInfo allocInfo;
3648 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
3649 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003650 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003651
3652 if(memTypeIndex == UINT32_MAX)
3653 memTypeIndex = allocInfo.memoryType;
3654
3655 char* data = nullptr;
3656
3657 if(mode == MODE::PERSISTENTLY_MAPPED)
3658 {
3659 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003660 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003661 }
3662 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
3663 mode == MODE::MAP_TWO_TIMES)
3664 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003665 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003666 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003667 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003668
3669 if(mode == MODE::MAP_TWO_TIMES)
3670 {
3671 char* data2 = nullptr;
3672 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003673 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003674 }
3675 }
3676 else if(mode == MODE::DONT_MAP)
3677 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003678 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003679 }
3680 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003681 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003682
3683 // Touch the first and last byte of the mapped memory to make sure reads and writes don't crash.
3684 if(data)
3685 data[0xFFFF] = data[0];
3686
3687 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
3688 {
3689 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
3690
3691 VmaAllocationInfo allocInfo;
3692 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
3693 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02003694 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003695 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003696 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003697 }
3698
3699 switch(rand.Generate() % 3)
3700 {
3701 case 0: Sleep(0); break; // Yield.
3702 case 1: Sleep(10); break; // 10 ms
3703 // default: No sleep.
3704 }
3705
3706 // Touch the first and last byte of the mapped memory again to make sure reads and writes still don't crash.
3707 if(data)
3708 data[0xFFFF] = data[0];
3709 }
3710
3711 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
3712 {
3713 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
3714 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
3715 {
3716 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
3717
3718 VmaAllocationInfo allocInfo;
3719 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003720 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003721 }
3722
3723 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
3724 }
3725 });
3726 }
3727
3728 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3729 threads[threadIndex].join();
3730
3731 vmaDestroyPool(g_hAllocator, pool);
3732 }
3733}
3734
3735static void WriteMainTestResultHeader(FILE* file)
3736{
3737 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02003738 "Code,Time,"
3739 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003740 "Total Time (us),"
3741 "Allocation Time Min (us),"
3742 "Allocation Time Avg (us),"
3743 "Allocation Time Max (us),"
3744 "Deallocation Time Min (us),"
3745 "Deallocation Time Avg (us),"
3746 "Deallocation Time Max (us),"
3747 "Total Memory Allocated (B),"
3748 "Free Range Size Avg (B),"
3749 "Free Range Size Max (B)\n");
3750}
3751
3752static void WriteMainTestResult(
3753 FILE* file,
3754 const char* codeDescription,
3755 const char* testDescription,
3756 const Config& config, const Result& result)
3757{
3758 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3759 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3760 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3761 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3762 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3763 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3764 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3765
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003766 std::string currTime;
3767 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003768
3769 fprintf(file,
3770 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003771 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
3772 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003773 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02003774 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01003775 totalTimeSeconds * 1e6f,
3776 allocationTimeMinSeconds * 1e6f,
3777 allocationTimeAvgSeconds * 1e6f,
3778 allocationTimeMaxSeconds * 1e6f,
3779 deallocationTimeMinSeconds * 1e6f,
3780 deallocationTimeAvgSeconds * 1e6f,
3781 deallocationTimeMaxSeconds * 1e6f,
3782 result.TotalMemoryAllocated,
3783 result.FreeRangeSizeAvg,
3784 result.FreeRangeSizeMax);
3785}
3786
3787static void WritePoolTestResultHeader(FILE* file)
3788{
3789 fprintf(file,
3790 "Code,Test,Time,"
3791 "Config,"
3792 "Total Time (us),"
3793 "Allocation Time Min (us),"
3794 "Allocation Time Avg (us),"
3795 "Allocation Time Max (us),"
3796 "Deallocation Time Min (us),"
3797 "Deallocation Time Avg (us),"
3798 "Deallocation Time Max (us),"
3799 "Lost Allocation Count,"
3800 "Lost Allocation Total Size (B),"
3801 "Failed Allocation Count,"
3802 "Failed Allocation Total Size (B)\n");
3803}
3804
3805static void WritePoolTestResult(
3806 FILE* file,
3807 const char* codeDescription,
3808 const char* testDescription,
3809 const PoolTestConfig& config,
3810 const PoolTestResult& result)
3811{
3812 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3813 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3814 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3815 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3816 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3817 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3818 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3819
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003820 std::string currTime;
3821 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003822
3823 fprintf(file,
3824 "%s,%s,%s,"
3825 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
3826 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
3827 // General
3828 codeDescription,
3829 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003830 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01003831 // Config
3832 config.ThreadCount,
3833 (unsigned long long)config.PoolSize,
3834 config.FrameCount,
3835 config.TotalItemCount,
3836 config.UsedItemCountMin,
3837 config.UsedItemCountMax,
3838 config.ItemsToMakeUnusedPercent,
3839 // Results
3840 totalTimeSeconds * 1e6f,
3841 allocationTimeMinSeconds * 1e6f,
3842 allocationTimeAvgSeconds * 1e6f,
3843 allocationTimeMaxSeconds * 1e6f,
3844 deallocationTimeMinSeconds * 1e6f,
3845 deallocationTimeAvgSeconds * 1e6f,
3846 deallocationTimeMaxSeconds * 1e6f,
3847 result.LostAllocationCount,
3848 result.LostAllocationTotalSize,
3849 result.FailedAllocationCount,
3850 result.FailedAllocationTotalSize);
3851}
3852
3853static void PerformCustomMainTest(FILE* file)
3854{
3855 Config config{};
3856 config.RandSeed = 65735476;
3857 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
3858 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
3859 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
3860 config.FreeOrder = FREE_ORDER::FORWARD;
3861 config.ThreadCount = 16;
3862 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02003863 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01003864
3865 // Buffers
3866 //config.AllocationSizes.push_back({4, 16, 1024});
3867 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
3868
3869 // Images
3870 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
3871 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
3872
3873 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
3874 config.AdditionalOperationCount = 1024;
3875
3876 Result result{};
3877 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003878 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003879 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
3880}
3881
3882static void PerformCustomPoolTest(FILE* file)
3883{
3884 PoolTestConfig config;
3885 config.PoolSize = 100 * 1024 * 1024;
3886 config.RandSeed = 2345764;
3887 config.ThreadCount = 1;
3888 config.FrameCount = 200;
3889 config.ItemsToMakeUnusedPercent = 2;
3890
3891 AllocationSize allocSize = {};
3892 allocSize.BufferSizeMin = 1024;
3893 allocSize.BufferSizeMax = 1024 * 1024;
3894 allocSize.Probability = 1;
3895 config.AllocationSizes.push_back(allocSize);
3896
3897 allocSize.BufferSizeMin = 0;
3898 allocSize.BufferSizeMax = 0;
3899 allocSize.ImageSizeMin = 128;
3900 allocSize.ImageSizeMax = 1024;
3901 allocSize.Probability = 1;
3902 config.AllocationSizes.push_back(allocSize);
3903
3904 config.PoolSize = config.CalcAvgResourceSize() * 200;
3905 config.UsedItemCountMax = 160;
3906 config.TotalItemCount = config.UsedItemCountMax * 10;
3907 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
3908
3909 g_MemoryAliasingWarningEnabled = false;
3910 PoolTestResult result = {};
3911 TestPool_Benchmark(result, config);
3912 g_MemoryAliasingWarningEnabled = true;
3913
3914 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
3915}
3916
Adam Sawickib8333fb2018-03-13 16:15:53 +01003917static void PerformMainTests(FILE* file)
3918{
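    // Runs MainTest over a cartesian product of configurations: thread count, buffers vs. images,
    // small vs. large sizes, varying vs. constant sizes, fraction allocated up front, and
    // allocation strategy. ConfigType controls how many variants of each dimension are exercised.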
3919 uint32_t repeatCount = 1;
3920 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
3921
3922 Config config{};
3923 config.RandSeed = 65735476;
3924 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
3925 config.FreeOrder = FREE_ORDER::FORWARD;
3926
3927 size_t threadCountCount = 1;
3928 switch(ConfigType)
3929 {
3930 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
3931 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
3932 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
3933 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
3934 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
3935 default: assert(0);
3936 }
Adam Sawicki0667e332018-08-24 17:26:44 +02003937
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003938 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02003939
Adam Sawickib8333fb2018-03-13 16:15:53 +01003940 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
3941 {
3942 std::string desc1;
3943
3944 switch(threadCountIndex)
3945 {
3946 case 0:
3947 desc1 += "1_thread";
3948 config.ThreadCount = 1;
3949 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
3950 break;
3951 case 1:
3952 desc1 += "16_threads+0%_common";
3953 config.ThreadCount = 16;
3954 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
3955 break;
3956 case 2:
3957 desc1 += "16_threads+50%_common";
3958 config.ThreadCount = 16;
3959 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
3960 break;
3961 case 3:
3962 desc1 += "16_threads+100%_common";
3963 config.ThreadCount = 16;
3964 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
3965 break;
3966 case 4:
3967 desc1 += "2_threads+0%_common";
3968 config.ThreadCount = 2;
3969 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
3970 break;
3971 case 5:
3972 desc1 += "2_threads+50%_common";
3973 config.ThreadCount = 2;
3974 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
3975 break;
3976 case 6:
3977 desc1 += "2_threads+100%_common";
3978 config.ThreadCount = 2;
3979 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
3980 break;
3981 default:
3982 assert(0);
3983 }
3984
3985 // 0 = buffers, 1 = images, 2 = buffers and images
3986 size_t buffersVsImagesCount = 2;
3987 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
3988 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
3989 {
3990 std::string desc2 = desc1;
3991 switch(buffersVsImagesIndex)
3992 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02003993 case 0: desc2 += ",Buffers"; break;
3994 case 1: desc2 += ",Images"; break;
3995 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01003996 default: assert(0);
3997 }
3998
3999 // 0 = small, 1 = large, 2 = small and large
4000 size_t smallVsLargeCount = 2;
4001 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4002 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4003 {
4004 std::string desc3 = desc2;
4005 switch(smallVsLargeIndex)
4006 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004007 case 0: desc3 += ",Small"; break;
4008 case 1: desc3 += ",Large"; break;
4009 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004010 default: assert(0);
4011 }
4012
4013 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4014 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4015 else
4016 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4017
4018 // 0 = varying sizes min...max, 1 = set of constant sizes
4019 size_t constantSizesCount = 1;
4020 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4021 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4022 {
4023 std::string desc4 = desc3;
4024 switch(constantSizesIndex)
4025 {
4026 case 0: desc4 += " Varying_sizes"; break;
4027 case 1: desc4 += " Constant_sizes"; break;
4028 default: assert(0);
4029 }
4030
4031 config.AllocationSizes.clear();
4032 // Buffers present
4033 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4034 {
4035 // Small
4036 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4037 {
4038 // Varying size
4039 if(constantSizesIndex == 0)
4040 config.AllocationSizes.push_back({4, 16, 1024});
4041 // Constant sizes
4042 else
4043 {
4044 config.AllocationSizes.push_back({1, 16, 16});
4045 config.AllocationSizes.push_back({1, 64, 64});
4046 config.AllocationSizes.push_back({1, 256, 256});
4047 config.AllocationSizes.push_back({1, 1024, 1024});
4048 }
4049 }
4050 // Large
4051 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4052 {
4053 // Varying size
4054 if(constantSizesIndex == 0)
4055 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4056 // Constant sizes
4057 else
4058 {
4059 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4060 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4061 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4062 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4063 }
4064 }
4065 }
4066 // Images present
4067 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4068 {
4069 // Small
4070 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4071 {
4072 // Varying size
4073 if(constantSizesIndex == 0)
4074 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4075 // Constant sizes
4076 else
4077 {
4078 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4079 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4080 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4081 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4082 }
4083 }
4084 // Large
4085 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4086 {
4087 // Varying size
4088 if(constantSizesIndex == 0)
4089 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4090 // Constant sizes
4091 else
4092 {
4093 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4094 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4095 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4096 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4097 }
4098 }
4099 }
4100
4101 // 0 = allocate 100% up front with no additional operations; 1/2/3 = allocate 50%/5%/95% up front, then run many additional operations
4102 size_t beginBytesToAllocateCount = 1;
4103 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4104 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4105 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4106 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4107 {
4108 std::string desc5 = desc4;
4109
4110 switch(beginBytesToAllocateIndex)
4111 {
4112 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004113 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004114 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4115 config.AdditionalOperationCount = 0;
4116 break;
4117 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004118 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004119 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4120 config.AdditionalOperationCount = 1024;
4121 break;
4122 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004123 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004124 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4125 config.AdditionalOperationCount = 1024;
4126 break;
4127 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004128 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004129 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4130 config.AdditionalOperationCount = 1024;
4131 break;
4132 default:
4133 assert(0);
4134 }
4135
Adam Sawicki0667e332018-08-24 17:26:44 +02004136 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004137 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004138 std::string desc6 = desc5;
4139 switch(strategyIndex)
4140 {
4141 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004142 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004143 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4144 break;
4145 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004146 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004147 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4148 break;
4149 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004150 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004151 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4152 break;
4153 default:
4154 assert(0);
4155 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004156
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004157 desc6 += ',';
4158 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004159
4160 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004161
4162 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4163 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004164 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004165
4166 Result result{};
4167 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004168 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004169 if(file)
4170 {
4171 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4172 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004173 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004174 }
4175 }
4176 }
4177 }
4178 }
4179 }
4180}
4181
4182static void PerformPoolTests(FILE* file)
4183{
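    // Runs TestPool_Benchmark over combinations of thread count, resource kind, size class and
    // pool subscription level (how many items are kept in use relative to what fits in the pool).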
4184 const size_t AVG_RESOURCES_PER_POOL = 300;
4185
4186 uint32_t repeatCount = 1;
4187 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4188
4189 PoolTestConfig config{};
4190 config.RandSeed = 2346343;
4191 config.FrameCount = 200;
4192 config.ItemsToMakeUnusedPercent = 2;
4193
4194 size_t threadCountCount = 1;
4195 switch(ConfigType)
4196 {
4197 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4198 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4199 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4200 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4201 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4202 default: assert(0);
4203 }
4204 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4205 {
4206 std::string desc1;
4207
4208 switch(threadCountIndex)
4209 {
4210 case 0:
4211 desc1 += "1_thread";
4212 config.ThreadCount = 1;
4213 break;
4214 case 1:
4215 desc1 += "16_threads";
4216 config.ThreadCount = 16;
4217 break;
4218 case 2:
4219 desc1 += "2_threads";
4220 config.ThreadCount = 2;
4221 break;
4222 default:
4223 assert(0);
4224 }
4225
4226 // 0 = buffers, 1 = images, 2 = buffers and images
4227 size_t buffersVsImagesCount = 2;
4228 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4229 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4230 {
4231 std::string desc2 = desc1;
4232 switch(buffersVsImagesIndex)
4233 {
4234 case 0: desc2 += " Buffers"; break;
4235 case 1: desc2 += " Images"; break;
4236 case 2: desc2 += " Buffers+Images"; break;
4237 default: assert(0);
4238 }
4239
4240 // 0 = small, 1 = large, 2 = small and large
4241 size_t smallVsLargeCount = 2;
4242 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4243 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4244 {
4245 std::string desc3 = desc2;
4246 switch(smallVsLargeIndex)
4247 {
4248 case 0: desc3 += " Small"; break;
4249 case 1: desc3 += " Large"; break;
4250 case 2: desc3 += " Small+Large"; break;
4251 default: assert(0);
4252 }
4253
4254 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4255 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4256 else
4257 config.PoolSize = 4ull * 1024 * 1024;
4258
4259 // 0 = varying sizes min...max, 1 = set of constant sizes
4260 size_t constantSizesCount = 1;
4261 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4262 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4263 {
4264 std::string desc4 = desc3;
4265 switch(constantSizesIndex)
4266 {
4267 case 0: desc4 += " Varying_sizes"; break;
4268 case 1: desc4 += " Constant_sizes"; break;
4269 default: assert(0);
4270 }
4271
4272 config.AllocationSizes.clear();
4273 // Buffers present
4274 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4275 {
4276 // Small
4277 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4278 {
4279 // Varying size
4280 if(constantSizesIndex == 0)
4281 config.AllocationSizes.push_back({4, 16, 1024});
4282 // Constant sizes
4283 else
4284 {
4285 config.AllocationSizes.push_back({1, 16, 16});
4286 config.AllocationSizes.push_back({1, 64, 64});
4287 config.AllocationSizes.push_back({1, 256, 256});
4288 config.AllocationSizes.push_back({1, 1024, 1024});
4289 }
4290 }
4291 // Large
4292 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4293 {
4294 // Varying size
4295 if(constantSizesIndex == 0)
4296 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4297 // Constant sizes
4298 else
4299 {
4300 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4301 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4302 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4303 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4304 }
4305 }
4306 }
4307 // Images present
4308 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4309 {
4310 // Small
4311 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4312 {
4313 // Varying size
4314 if(constantSizesIndex == 0)
4315 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4316 // Constant sizes
4317 else
4318 {
4319 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4320 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4321 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4322 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4323 }
4324 }
4325 // Large
4326 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4327 {
4328 // Varying size
4329 if(constantSizesIndex == 0)
4330 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4331 // Constant sizes
4332 else
4333 {
4334 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4335 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4336 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4337 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4338 }
4339 }
4340 }
4341
4342 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4343 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4344
4345 // Subscription level: UsedItemCountMax as a percentage of what fits in the pool. 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4346 size_t subscriptionModeCount;
4347 switch(ConfigType)
4348 {
4349 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4350 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4351 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4352 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4353 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4354 default: assert(0);
4355 }
4356 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4357 {
4358 std::string desc5 = desc4;
4359
4360 switch(subscriptionModeIndex)
4361 {
4362 case 0:
4363 desc5 += " Subscription_66%";
4364 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4365 break;
4366 case 1:
4367 desc5 += " Subscription_133%";
4368 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4369 break;
4370 case 2:
4371 desc5 += " Subscription_100%";
4372 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4373 break;
4374 case 3:
4375 desc5 += " Subscription_33%";
4376 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4377 break;
4378 case 4:
4379 desc5 += " Subscription_166%";
4380 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4381 break;
4382 default:
4383 assert(0);
4384 }
4385
4386 config.TotalItemCount = config.UsedItemCountMax * 5;
4387 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4388
4389 const char* testDescription = desc5.c_str();
4390
4391 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4392 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004393 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004394
4395 PoolTestResult result{};
4396 g_MemoryAliasingWarningEnabled = false;
4397 TestPool_Benchmark(result, config);
4398 g_MemoryAliasingWarningEnabled = true;
4399 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4400 }
4401 }
4402 }
4403 }
4404 }
4405 }
4406}
4407
Adam Sawickia83793a2018-09-03 13:40:42 +02004408static void BasicTestBuddyAllocator()
4409{
4410 wprintf(L"Basic test buddy allocator\n");
4411
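    // The buddy algorithm manages each memory block as a binary tree of power-of-two nodes:
    // allocation sizes are rounded up to a power of two (so some internal fragmentation is
    // expected) and the usable size of a non-power-of-two block is rounded down, which is
    // what the deliberately odd blockSize below exercises.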
4412 RandomNumberGenerator rand{76543};
4413
4414 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4415 sampleBufCreateInfo.size = 1024; // Whatever.
4416 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4417
4418 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4419 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4420
4421 VmaPoolCreateInfo poolCreateInfo = {};
4422 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004423 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004424
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004425 // Deliberately adding 1023 to test usable size smaller than memory block size.
4426 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004427 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004428 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004429
4430 VmaPool pool = nullptr;
4431 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004432 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004433
4434 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4435
4436 VmaAllocationCreateInfo allocCreateInfo = {};
4437 allocCreateInfo.pool = pool;
4438
4439 std::vector<BufferInfo> bufInfo;
4440 BufferInfo newBufInfo;
4441 VmaAllocationInfo allocInfo;
4442
4443 bufCreateInfo.size = 1024 * 256;
4444 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4445 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004446 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004447 bufInfo.push_back(newBufInfo);
4448
4449 bufCreateInfo.size = 1024 * 512;
4450 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4451 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004452 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004453 bufInfo.push_back(newBufInfo);
4454
4455 bufCreateInfo.size = 1024 * 128;
4456 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4457 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004458 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004459 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004460
4461 // Test very small allocation, smaller than minimum node size.
4462 bufCreateInfo.size = 1;
4463 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4464 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004465 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004466 bufInfo.push_back(newBufInfo);
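    // This should still succeed; the 1 B request is expected to consume one node of the
    // allocator's minimum node size rather than literally one byte.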
Adam Sawickia83793a2018-09-03 13:40:42 +02004467
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004468 // Test some small allocation with alignment requirement.
4469 {
4470 VkMemoryRequirements memReq;
4471 memReq.alignment = 256;
4472 memReq.memoryTypeBits = UINT32_MAX;
4473 memReq.size = 32;
4474
4475 newBufInfo.Buffer = VK_NULL_HANDLE;
4476 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4477 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004478 TEST(res == VK_SUCCESS);
4479 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004480 bufInfo.push_back(newBufInfo);
4481 }
4482
4483 //SaveAllocatorStatsToFile(L"TEST.json");
4484
Adam Sawicki21017c62018-09-07 15:26:59 +02004485 VmaPoolStats stats = {};
4486 vmaGetPoolStats(g_hAllocator, pool, &stats);
4487 int DBG = 0; // Set breakpoint here to inspect `stats`.
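    // Optional debug print of the gathered statistics (field names as in VmaPoolStats
    // from the public header):
    //printf("Pool stats: size=%llu unusedSize=%llu allocationCount=%llu blockCount=%llu\n",
    //    (unsigned long long)stats.size, (unsigned long long)stats.unusedSize,
    //    (unsigned long long)stats.allocationCount, (unsigned long long)stats.blockCount);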
4488
    // Allocate enough new buffers that they are certain to spill over into a second block.
4490 for(uint32_t i = 0; i < 32; ++i)
4491 {
4492 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4493 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4494 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004495 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004496 bufInfo.push_back(newBufInfo);
4497 }
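    // Rough expectation: the first ~1 MiB block already holds about 900 KiB, while the
    // loop above requests on the order of 0.5 MiB on average, so at least one additional
    // block should have been created. If desired, this could be re-checked here, e.g.:
    //vmaGetPoolStats(g_hAllocator, pool, &stats);
    //TEST(stats.blockCount > 1);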
4498
4499 SaveAllocatorStatsToFile(L"BuddyTest01.json");
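    // The JSON dump above can be inspected directly or visualized with the VmaDumpVis
    // tool from this repository to see how the buddy blocks are split at this point.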
4500
Adam Sawickia83793a2018-09-03 13:40:42 +02004501 // Destroy the buffers in random order.
4502 while(!bufInfo.empty())
4503 {
4504 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4505 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4506 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4507 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4508 }
4509
4510 vmaDestroyPool(g_hAllocator, pool);
4511}
4512
// Sanity check of the testing environment itself: verifies that the UploadGpuData()/ValidateGpuData() helpers round-trip buffer contents correctly.
4514static void TestGpuData()
4515{
4516 RandomNumberGenerator rand = { 53434 };
4517
4518 std::vector<AllocInfo> allocInfo;
4519
4520 for(size_t i = 0; i < 100; ++i)
4521 {
4522 AllocInfo info = {};
4523
4524 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4525 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4526 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4527 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4528 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
4529
4530 VmaAllocationCreateInfo allocCreateInfo = {};
4531 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4532
4533 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
4534 TEST(res == VK_SUCCESS);
4535
4536 info.m_StartValue = rand.Generate();
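        // m_StartValue is presumably used by UploadGpuData()/ValidateGpuData() as the seed
        // of the per-buffer data pattern, so every buffer gets distinct, verifiable content.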
4537
4538 allocInfo.push_back(std::move(info));
4539 }
4540
4541 UploadGpuData(allocInfo.data(), allocInfo.size());
4542
4543 ValidateGpuData(allocInfo.data(), allocInfo.size());
4544
4545 DestroyAllAllocations(allocInfo);
4546}
4547
Adam Sawickib8333fb2018-03-13 16:15:53 +01004548void Test()
4549{
4550 wprintf(L"TESTING:\n");
4551
Adam Sawickif2975342018-10-16 13:49:02 +02004552 if(true)
Adam Sawicki70a683e2018-08-24 15:36:32 +02004553 {
        // # Temporarily insert custom tests here.
        // NOTE: while this `if(true)` block is enabled, Test() returns from it, so the
        // entire regular test suite below is skipped.
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004555 // ########################################
4556 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02004557
Adam Sawickif2975342018-10-16 13:49:02 +02004558 //TestDefragmentationSimple();
4559 //TestDefragmentationFull();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004560 return;
4561 }
4562
Adam Sawickib8333fb2018-03-13 16:15:53 +01004563 // # Simple tests
4564
4565 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02004566 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02004567#if VMA_DEBUG_MARGIN
4568 TestDebugMargin();
4569#else
4570 TestPool_SameSize();
4571 TestHeapSizeLimit();
4572#endif
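    // Presumably these two branches are exclusive because TestDebugMargin() requires the
    // debug margins, while the pool and heap-size-limit tests assume exact allocation
    // sizes that the margins would break.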
Adam Sawickie44c6262018-06-15 14:30:39 +02004573#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
4574 TestAllocationsInitialization();
4575#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01004576 TestMapping();
4577 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02004578 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02004579 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004580 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004581
Adam Sawicki4338f662018-09-07 14:12:37 +02004582 BasicTestBuddyAllocator();
4583
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004584 {
4585 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02004586 fopen_s(&file, "Algorithms.csv", "w");
        TEST(file != NULL); // Use TEST so the check also runs in release builds.
Adam Sawicki80927152018-09-07 17:27:23 +02004588 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004589 fclose(file);
4590 }
4591
Adam Sawickib8333fb2018-03-13 16:15:53 +01004592 TestDefragmentationSimple();
4593 TestDefragmentationFull();
4594
4595 // # Detailed tests
4596 FILE* file;
4597 fopen_s(&file, "Results.csv", "w");
    TEST(file != NULL);
4599
4600 WriteMainTestResultHeader(file);
4601 PerformMainTests(file);
4602 //PerformCustomMainTest(file);
4603
4604 WritePoolTestResultHeader(file);
4605 PerformPoolTests(file);
4606 //PerformCustomPoolTest(file);
4607
4608 fclose(file);
4609
4610 wprintf(L"Done.\n");
4611}
4612
Adam Sawickif1a793c2018-03-13 15:42:22 +01004613#endif // #ifdef _WIN32