#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

static uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

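// Number of allocation strategies exercised by the tests, scaled with the
// configured test size (ConfigType).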
static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL:   strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE:   strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

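// RAII helper: measures the lifetime of the object and accumulates the elapsed
// time into the referenced min/sum/max duration counters.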
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

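// Multi-threaded allocation stress test. Each thread creates buffers and images
// according to `config`, performs additional random allocations and frees, then
// releases everything in the configured FreeOrder. Timing and memory statistics
// are accumulated into `outResult`.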
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate.
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources.
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

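// Pool of mappable staging buffers reused between transfers. The total size of
// all buffers is capped at MAX_TOTAL_SIZE. Typical use (see UploadGpuData below):
//
//   StagingBufferCollection stagingBufs;
//   VkBuffer stagingBuf; void* mappedPtr;
//   if(stagingBufs.AcquireBuffer(size, stagingBuf, mappedPtr))
//   {
//       // Fill mappedPtr, record a copy from stagingBuf...
//   }
//   stagingBufs.ReleaseAllBuffers(); // After the command buffer has finished.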
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

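// Fills every buffer in `allocInfo` with its deterministic test pattern
// (m_StartValue, m_StartValue + 1, ...) by writing to staging buffers and
// recording copies into a single-time command buffer. Images are not supported yet.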
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

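// Reads every buffer in `allocInfo` back through staging buffers and checks that
// it still contains the pattern written by UploadGpuData.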
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from source buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

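// Destroys and recreates the VkBuffer/VkImage of an allocation and rebinds it to
// the allocation's current memory. Needed after defragmentation has moved the
// allocation to a new place.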
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

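// Convenience wrapper over vmaDefragment() that also recreates the bound
// resources of all allocations reported as changed.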
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

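// Defragments small custom pools filled with fixed- and variable-size buffers
// and validates the buffer contents afterwards.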
void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations.
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

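// GPU-side defragmentation: creates GPU-only buffers filled through UploadGpuData,
// frees a percentage of them, defragments the movable ones using a command buffer,
// then validates the remaining data with ValidateGpuData.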
Adam Sawickic6ede152018-11-16 17:04:14 +01001535static void TestDefragmentationGpu(uint32_t flags)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001536{
Adam Sawickic6ede152018-11-16 17:04:14 +01001537 const wchar_t* flagsName = L"0";
1538 switch(flags)
1539 {
1540 case VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT:
1541 flagsName = L"FAST";
1542 break;
1543 case VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT:
1544 flagsName = L"OPTIMAL";
1545 break;
1546 }
1547
1548 wprintf(L"Test defragmentation GPU (%s)\n", flagsName);
Adam Sawicki05704002018-11-08 16:07:29 +01001549 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001550
1551 std::vector<AllocInfo> allocations;
1552
1553 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001554 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1555 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001556 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001557 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1558 const size_t percentToLeave = 30;
1559 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001560 RandomNumberGenerator rand = { 234522 };
1561
1562 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001563
1564 VmaAllocationCreateInfo allocCreateInfo = {};
1565 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001566 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001567
1568 // Create all intended buffers.
1569 for(size_t i = 0; i < bufCount; ++i)
1570 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001571 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1572
1573 if(rand.Generate() % 100 < percentNonMovable)
1574 {
1575 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1576 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1577 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1578 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1579 }
1580 else
1581 {
1582 // Different usage just to see different color in output from VmaDumpVis.
1583 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1584 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1585 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1586 // And in JSON dump.
1587 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1588 }
1589
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001590 AllocInfo alloc;
1591 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1592 alloc.m_StartValue = rand.Generate();
1593 allocations.push_back(alloc);
1594 }
1595
1596 // Destroy some percentage of them.
1597 {
1598 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1599 for(size_t i = 0; i < buffersToDestroy; ++i)
1600 {
1601 const size_t index = rand.Generate() % allocations.size();
1602 allocations[index].Destroy();
1603 allocations.erase(allocations.begin() + index);
1604 }
1605 }
1606
1607 // Fill them with meaningful data.
1608 UploadGpuData(allocations.data(), allocations.size());
1609
Adam Sawickic6ede152018-11-16 17:04:14 +01001610 wchar_t fileName[MAX_PATH];
1611 swprintf_s(fileName, L"GPU_defragmentation_%s_A_before.json", flagsName);
1612 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001613
1614 // Defragment using GPU only.
1615 {
1616 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001617
Adam Sawickic6ede152018-11-16 17:04:14 +01001618 std::vector<VmaAllocation> allocationPtrs;
1619 std::vector<VkBool32> allocationChanged;
1620 std::vector<size_t> allocationOriginalIndex;
1621
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001622 for(size_t i = 0; i < allocCount; ++i)
1623 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 VmaAllocationInfo allocInfo = {};
1625 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1626 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1627 {
1628 allocationPtrs.push_back(allocations[i].m_Allocation);
1629 allocationChanged.push_back(VK_FALSE);
1630 allocationOriginalIndex.push_back(i);
1631 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001632 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001633
1634 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001635
1636 BeginSingleTimeCommands();
1637
1638 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawickic6ede152018-11-16 17:04:14 +01001639 defragInfo.flags = flags;
1640 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001641 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001642 defragInfo.pAllocationsChanged = allocationChanged.data();
1643 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001644 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1645 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
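        // vmaDefragmentationBegin() records the data-copy commands into the command buffer given above.
        // EndSingleTimeCommands() submits it and waits, and only then vmaDefragmentationEnd() is called
        // to finish the defragmentation, freeing emptied blocks and updating the moved allocations.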
1646
1647 VmaDefragmentationStats stats = {};
1648 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1649 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1650 TEST(res >= VK_SUCCESS);
1651
1652 EndSingleTimeCommands();
1653
1654 vmaDefragmentationEnd(g_hAllocator, ctx);
1655
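        // Allocations that were moved now live in different VkDeviceMemory, so the buffers bound to
        // the old memory must be destroyed and recreated at the new location before validating data.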
Adam Sawickic6ede152018-11-16 17:04:14 +01001656 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001657 {
1658 if(allocationChanged[i])
1659 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001660 const size_t origAllocIndex = allocationOriginalIndex[i];
1661 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001662 }
1663 }
1664
Adam Sawicki440307e2018-10-18 15:05:19 +02001665 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1666 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001667 }
1668
1669 ValidateGpuData(allocations.data(), allocations.size());
1670
Adam Sawickic6ede152018-11-16 17:04:14 +01001671 swprintf_s(fileName, L"GPU_defragmentation_%s_B_after.json", flagsName);
1672 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001673
1674 // Destroy all remaining buffers.
1675 for(size_t i = allocations.size(); i--; )
1676 {
1677 allocations[i].Destroy();
1678 }
Adam Sawicki05704002018-11-08 16:07:29 +01001679
1680 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001681}
1682
Adam Sawickib8333fb2018-03-13 16:15:53 +01001683static void TestUserData()
1684{
1685 VkResult res;
1686
1687 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1688 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1689 bufCreateInfo.size = 0x10000;
1690
1691 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1692 {
1693 // Opaque pointer
1694 {
1695
1696 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1697 void* pointerToSomething = &res;
1698
1699 VmaAllocationCreateInfo allocCreateInfo = {};
1700 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1701 allocCreateInfo.pUserData = numberAsPointer;
1702 if(testIndex == 1)
1703 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1704
1705 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1706 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001707 TEST(res == VK_SUCCESS);
1708            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001709
1710 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001711 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001712
1713 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1714 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001715 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001716
1717 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1718 }
1719
1720 // String
1721 {
1722 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1723 const char* name2 = "2";
1724 const size_t name1Len = strlen(name1);
1725
1726 char* name1Buf = new char[name1Len + 1];
1727 strcpy_s(name1Buf, name1Len + 1, name1);
1728
1729 VmaAllocationCreateInfo allocCreateInfo = {};
1730 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1731 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1732 allocCreateInfo.pUserData = name1Buf;
1733 if(testIndex == 1)
1734 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1735
1736 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1737 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001738 TEST(res == VK_SUCCESS);
1739 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1740 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001741
1742 delete[] name1Buf;
1743
1744 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001745 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001746
1747 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1748 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001749 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001750
1751 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1752 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001753 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001754
1755 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1756 }
1757 }
1758}
1759
Adam Sawicki370ab182018-11-08 16:31:00 +01001760static void TestInvalidAllocations()
1761{
1762 VkResult res;
1763
1764 VmaAllocationCreateInfo allocCreateInfo = {};
1765 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1766
1767 // Try to allocate 0 bytes.
1768 {
1769 VkMemoryRequirements memReq = {};
1770 memReq.size = 0; // !!!
1771 memReq.alignment = 4;
1772 memReq.memoryTypeBits = UINT32_MAX;
1773 VmaAllocation alloc = VK_NULL_HANDLE;
1774 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1775 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1776 }
1777
1778 // Try to create buffer with size = 0.
1779 {
1780 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1781 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1782 bufCreateInfo.size = 0; // !!!
1783 VkBuffer buf = VK_NULL_HANDLE;
1784 VmaAllocation alloc = VK_NULL_HANDLE;
1785 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1786 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1787 }
1788
1789 // Try to create image with one dimension = 0.
1790 {
1791        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1792 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1793 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1794 imageCreateInfo.extent.width = 128;
1795 imageCreateInfo.extent.height = 0; // !!!
1796 imageCreateInfo.extent.depth = 1;
1797 imageCreateInfo.mipLevels = 1;
1798 imageCreateInfo.arrayLayers = 1;
1799 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1800 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1801 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1802 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1803 VkImage image = VK_NULL_HANDLE;
1804 VmaAllocation alloc = VK_NULL_HANDLE;
1805 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1806 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1807 }
1808}
1809
Adam Sawickib8333fb2018-03-13 16:15:53 +01001810static void TestMemoryRequirements()
1811{
1812 VkResult res;
1813 VkBuffer buf;
1814 VmaAllocation alloc;
1815 VmaAllocationInfo allocInfo;
1816
1817 const VkPhysicalDeviceMemoryProperties* memProps;
1818 vmaGetMemoryProperties(g_hAllocator, &memProps);
1819
1820 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1821 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1822 bufInfo.size = 128;
1823
1824 VmaAllocationCreateInfo allocCreateInfo = {};
1825
1826 // No requirements.
1827 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001828 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001829 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1830
1831 // Usage.
1832 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1833 allocCreateInfo.requiredFlags = 0;
1834 allocCreateInfo.preferredFlags = 0;
1835 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1836
1837 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001838 TEST(res == VK_SUCCESS);
1839 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001840 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1841
1842 // Required flags, preferred flags.
1843 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1844 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1845 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1846 allocCreateInfo.memoryTypeBits = 0;
1847
1848 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001849 TEST(res == VK_SUCCESS);
1850 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1851 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001852 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1853
1854 // memoryTypeBits.
1855 const uint32_t memType = allocInfo.memoryType;
1856 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1857 allocCreateInfo.requiredFlags = 0;
1858 allocCreateInfo.preferredFlags = 0;
1859 allocCreateInfo.memoryTypeBits = 1u << memType;
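    // memoryTypeBits is a bit mask of acceptable memory types; setting only the bit for memType
    // forces the allocation into that single type, which the TEST below verifies.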
1860
1861 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001862 TEST(res == VK_SUCCESS);
1863 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001864 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1865
1866}
1867
1868static void TestBasics()
1869{
1870 VkResult res;
1871
1872 TestMemoryRequirements();
1873
1874 // Lost allocation
1875 {
1876 VmaAllocation alloc = VK_NULL_HANDLE;
1877 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001878 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001879
1880 VmaAllocationInfo allocInfo;
1881 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001882 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1883 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001884
1885 vmaFreeMemory(g_hAllocator, alloc);
1886 }
1887
1888 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1889 {
1890 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1891 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1892 bufCreateInfo.size = 128;
1893
1894 VmaAllocationCreateInfo allocCreateInfo = {};
1895 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1896 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1897
1898 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1899 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001900 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001901
1902 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1903
1904        // Same, but with DEDICATED_MEMORY.
1905 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1906
1907 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001908 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001909
1910 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1911 }
1912
1913 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001914
1915 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001916}
1917
1918void TestHeapSizeLimit()
1919{
1920 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1921 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1922
1923 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1924 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1925 {
1926 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1927 }
1928
1929 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1930 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1931 allocatorCreateInfo.device = g_hDevice;
1932 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
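    // pHeapSizeLimit points to an array of VK_MAX_MEMORY_HEAPS entries; each entry caps how much
    // memory this allocator will use from the corresponding heap, regardless of its real size.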
1933
1934 VmaAllocator hAllocator;
1935 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001936 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001937
1938 struct Item
1939 {
1940 VkBuffer hBuf;
1941 VmaAllocation hAlloc;
1942 };
1943 std::vector<Item> items;
1944
1945 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1946 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1947
1948    // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
1949 VmaAllocationInfo ownAllocInfo;
1950 {
1951 VmaAllocationCreateInfo allocCreateInfo = {};
1952 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1953 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1954
1955 bufCreateInfo.size = BLOCK_SIZE / 2;
1956
1957 for(size_t i = 0; i < 2; ++i)
1958 {
1959 Item item;
1960 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001961 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001962 items.push_back(item);
1963 }
1964 }
1965
1966    // Create a pool to make sure the following allocations have to come from this same memory type.
1967 VmaPoolCreateInfo poolCreateInfo = {};
1968 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1969 poolCreateInfo.blockSize = BLOCK_SIZE;
1970
1971 VmaPool hPool;
1972 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001973 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001974
1975 // 2. Allocate normal buffers from all the remaining memory.
1976 {
1977 VmaAllocationCreateInfo allocCreateInfo = {};
1978 allocCreateInfo.pool = hPool;
1979
1980 bufCreateInfo.size = BLOCK_SIZE / 2;
1981
1982 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
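        // With the values above: 1 GB limit / 128 MB blocks = 8 blocks. One block's worth (2 x 64 MB)
        // is already taken by the dedicated allocations, so (8 - 1) * 2 = 14 half-block buffers should still fit.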
1983 for(size_t i = 0; i < bufCount; ++i)
1984 {
1985 Item item;
1986 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001987 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001988 items.push_back(item);
1989 }
1990 }
1991
1992 // 3. Allocation of one more (even small) buffer should fail.
1993 {
1994 VmaAllocationCreateInfo allocCreateInfo = {};
1995 allocCreateInfo.pool = hPool;
1996
1997 bufCreateInfo.size = 128;
1998
1999 VkBuffer hBuf;
2000 VmaAllocation hAlloc;
2001 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002002 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002003 }
2004
2005 // Destroy everything.
2006 for(size_t i = items.size(); i--; )
2007 {
2008 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2009 }
2010
2011 vmaDestroyPool(hAllocator, hPool);
2012
2013 vmaDestroyAllocator(hAllocator);
2014}
2015
Adam Sawicki212a4a62018-06-14 15:44:45 +02002016#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002017static void TestDebugMargin()
2018{
2019 if(VMA_DEBUG_MARGIN == 0)
2020 {
2021 return;
2022 }
2023
2024 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002025 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002026
2027 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002028 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002029
2030    // Create a few buffers of different sizes.
2031 const size_t BUF_COUNT = 10;
2032 BufferInfo buffers[BUF_COUNT];
2033 VmaAllocationInfo allocInfo[BUF_COUNT];
2034    for(size_t i = 0; i < BUF_COUNT; ++i)
2035 {
2036 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002037 // Last one will be mapped.
2038 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002039
2040 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002041 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002042 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002043 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002044
2045 if(i == BUF_COUNT - 1)
2046 {
2047 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002048 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002049 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2050 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2051 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002052 }
2053
2054 // Check if their offsets preserve margin between them.
2055 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2056 {
2057 if(lhs.deviceMemory != rhs.deviceMemory)
2058 {
2059 return lhs.deviceMemory < rhs.deviceMemory;
2060 }
2061 return lhs.offset < rhs.offset;
2062 });
2063 for(size_t i = 1; i < BUF_COUNT; ++i)
2064 {
2065 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2066 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002067 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002068 }
2069 }
2070
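    // vmaCheckCorruption() scans the debug margins of allocations in host-visible memory and fails
    // if their fill pattern was overwritten (assuming VMA_DEBUG_DETECT_CORRUPTION is enabled in this build).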
Adam Sawicki212a4a62018-06-14 15:44:45 +02002071 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002072 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002073
Adam Sawicki73b16652018-06-11 16:39:25 +02002074 // Destroy all buffers.
2075 for(size_t i = BUF_COUNT; i--; )
2076 {
2077 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2078 }
2079}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002080#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002081
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002082static void TestLinearAllocator()
2083{
2084 wprintf(L"Test linear allocator\n");
2085
2086 RandomNumberGenerator rand{645332};
2087
2088 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2089 sampleBufCreateInfo.size = 1024; // Whatever.
2090 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2091
2092 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2093 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2094
2095 VmaPoolCreateInfo poolCreateInfo = {};
2096 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002097 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002098
Adam Sawickiee082772018-06-20 17:45:49 +02002099 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002100 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2101 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
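    // The linear algorithm places allocations one after another inside a single block, which is what
    // enables the free-at-once, stack, double stack, and ring buffer patterns exercised below.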
2102
2103 VmaPool pool = nullptr;
2104 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002106
2107 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2108
2109 VmaAllocationCreateInfo allocCreateInfo = {};
2110 allocCreateInfo.pool = pool;
2111
2112 constexpr size_t maxBufCount = 100;
2113 std::vector<BufferInfo> bufInfo;
2114
2115 constexpr VkDeviceSize bufSizeMin = 16;
2116 constexpr VkDeviceSize bufSizeMax = 1024;
2117 VmaAllocationInfo allocInfo;
2118 VkDeviceSize prevOffset = 0;
2119
2120 // Test one-time free.
2121 for(size_t i = 0; i < 2; ++i)
2122 {
2123        // Allocate a number of buffers of varying size that surely fit into this block.
2124 VkDeviceSize bufSumSize = 0;
2125 for(size_t i = 0; i < maxBufCount; ++i)
2126 {
2127 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2128 BufferInfo newBufInfo;
2129 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2130 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002131 TEST(res == VK_SUCCESS);
2132 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002133 bufInfo.push_back(newBufInfo);
2134 prevOffset = allocInfo.offset;
2135 bufSumSize += bufCreateInfo.size;
2136 }
2137
2138 // Validate pool stats.
2139 VmaPoolStats stats;
2140 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002141 TEST(stats.size == poolCreateInfo.blockSize);
2142        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2143 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002144
2145 // Destroy the buffers in random order.
2146 while(!bufInfo.empty())
2147 {
2148 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2149 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2150 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2151 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2152 }
2153 }
2154
2155 // Test stack.
2156 {
2157        // Allocate a number of buffers of varying size that surely fit into this block.
2158 for(size_t i = 0; i < maxBufCount; ++i)
2159 {
2160 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2161 BufferInfo newBufInfo;
2162 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2163 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002164 TEST(res == VK_SUCCESS);
2165 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002166 bufInfo.push_back(newBufInfo);
2167 prevOffset = allocInfo.offset;
2168 }
2169
2170        // Destroy a few buffers from the top of the stack.
2171 for(size_t i = 0; i < maxBufCount / 5; ++i)
2172 {
2173 const BufferInfo& currBufInfo = bufInfo.back();
2174 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2175 bufInfo.pop_back();
2176 }
2177
2178 // Create some more
2179 for(size_t i = 0; i < maxBufCount / 5; ++i)
2180 {
2181 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2182 BufferInfo newBufInfo;
2183 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2184 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002185 TEST(res == VK_SUCCESS);
2186 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002187 bufInfo.push_back(newBufInfo);
2188 prevOffset = allocInfo.offset;
2189 }
2190
2191 // Destroy the buffers in reverse order.
2192 while(!bufInfo.empty())
2193 {
2194 const BufferInfo& currBufInfo = bufInfo.back();
2195 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2196 bufInfo.pop_back();
2197 }
2198 }
2199
Adam Sawickiee082772018-06-20 17:45:49 +02002200 // Test ring buffer.
2201 {
2202        // Allocate a number of buffers that surely fit into this block.
2203 bufCreateInfo.size = bufSizeMax;
2204 for(size_t i = 0; i < maxBufCount; ++i)
2205 {
2206 BufferInfo newBufInfo;
2207 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2208 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002209 TEST(res == VK_SUCCESS);
2210 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002211 bufInfo.push_back(newBufInfo);
2212 prevOffset = allocInfo.offset;
2213 }
2214
2215        // Free and allocate new buffers so many times that we are sure to wrap around at least once.
2216 const size_t buffersPerIter = maxBufCount / 10 - 1;
2217 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
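        // blockSize / bufSize allocations fill the block once; replacing twice that many buffers in
        // total guarantees that the ring buffer's write position wraps around at least once.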
2218 for(size_t iter = 0; iter < iterCount; ++iter)
2219 {
2220 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2221 {
2222 const BufferInfo& currBufInfo = bufInfo.front();
2223 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2224 bufInfo.erase(bufInfo.begin());
2225 }
2226 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2227 {
2228 BufferInfo newBufInfo;
2229 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2230 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002231 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002232 bufInfo.push_back(newBufInfo);
2233 }
2234 }
2235
2236 // Allocate buffers until we reach out-of-memory.
2237 uint32_t debugIndex = 0;
2238 while(res == VK_SUCCESS)
2239 {
2240 BufferInfo newBufInfo;
2241 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2242 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2243 if(res == VK_SUCCESS)
2244 {
2245 bufInfo.push_back(newBufInfo);
2246 }
2247 else
2248 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002249 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002250 }
2251 ++debugIndex;
2252 }
2253
2254 // Destroy the buffers in random order.
2255 while(!bufInfo.empty())
2256 {
2257 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2258 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2259 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2260 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2261 }
2262 }
2263
Adam Sawicki680b2252018-08-22 14:47:32 +02002264 // Test double stack.
2265 {
2266        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
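        // With VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT the linear allocator places the allocation at
        // the upper end of the block and grows downwards, so the block behaves as a double stack.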
2267 VkDeviceSize prevOffsetLower = 0;
2268 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2269 for(size_t i = 0; i < maxBufCount; ++i)
2270 {
2271 const bool upperAddress = (i % 2) != 0;
2272 if(upperAddress)
2273 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2274 else
2275 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2276 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2277 BufferInfo newBufInfo;
2278 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2279 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002280 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002281 if(upperAddress)
2282 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002283 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002284 prevOffsetUpper = allocInfo.offset;
2285 }
2286 else
2287 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002288 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002289 prevOffsetLower = allocInfo.offset;
2290 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002291 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002292 bufInfo.push_back(newBufInfo);
2293 }
2294
2295        // Destroy a few buffers from the top of the stack.
2296 for(size_t i = 0; i < maxBufCount / 5; ++i)
2297 {
2298 const BufferInfo& currBufInfo = bufInfo.back();
2299 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2300 bufInfo.pop_back();
2301 }
2302
2303 // Create some more
2304 for(size_t i = 0; i < maxBufCount / 5; ++i)
2305 {
2306 const bool upperAddress = (i % 2) != 0;
2307 if(upperAddress)
2308 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2309 else
2310 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2311 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2312 BufferInfo newBufInfo;
2313 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2314 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002315 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002316 bufInfo.push_back(newBufInfo);
2317 }
2318
2319 // Destroy the buffers in reverse order.
2320 while(!bufInfo.empty())
2321 {
2322 const BufferInfo& currBufInfo = bufInfo.back();
2323 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2324 bufInfo.pop_back();
2325 }
2326
2327 // Create buffers on both sides until we reach out of memory.
2328 prevOffsetLower = 0;
2329 prevOffsetUpper = poolCreateInfo.blockSize;
2330 res = VK_SUCCESS;
2331 for(size_t i = 0; res == VK_SUCCESS; ++i)
2332 {
2333 const bool upperAddress = (i % 2) != 0;
2334 if(upperAddress)
2335 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2336 else
2337 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2338 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2339 BufferInfo newBufInfo;
2340 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2341 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2342 if(res == VK_SUCCESS)
2343 {
2344 if(upperAddress)
2345 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002346 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002347 prevOffsetUpper = allocInfo.offset;
2348 }
2349 else
2350 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002351 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002352 prevOffsetLower = allocInfo.offset;
2353 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002354 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002355 bufInfo.push_back(newBufInfo);
2356 }
2357 }
2358
2359 // Destroy the buffers in random order.
2360 while(!bufInfo.empty())
2361 {
2362 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2363 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2364 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2365 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2366 }
2367
2368 // Create buffers on upper side only, constant size, until we reach out of memory.
2369 prevOffsetUpper = poolCreateInfo.blockSize;
2370 res = VK_SUCCESS;
2371 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2372 bufCreateInfo.size = bufSizeMax;
2373 for(size_t i = 0; res == VK_SUCCESS; ++i)
2374 {
2375 BufferInfo newBufInfo;
2376 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2377 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2378 if(res == VK_SUCCESS)
2379 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002380 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002381 prevOffsetUpper = allocInfo.offset;
2382 bufInfo.push_back(newBufInfo);
2383 }
2384 }
2385
2386 // Destroy the buffers in reverse order.
2387 while(!bufInfo.empty())
2388 {
2389 const BufferInfo& currBufInfo = bufInfo.back();
2390 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2391 bufInfo.pop_back();
2392 }
2393 }
2394
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002395 // Test ring buffer with lost allocations.
2396 {
2397        // Allocate a number of buffers until the pool is full.
2398        // Notice the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
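        // With CAN_BECOME_LOST, an allocation that has not been used for more than frameInUseCount
        // frames may be taken over by a later allocation created with CAN_MAKE_OTHER_LOST;
        // vmaSetCurrentFrameIndex() is what advances that frame counter.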
2399 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2400 res = VK_SUCCESS;
2401 for(size_t i = 0; res == VK_SUCCESS; ++i)
2402 {
2403 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2404
2405 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2406
2407 BufferInfo newBufInfo;
2408 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2409 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2410 if(res == VK_SUCCESS)
2411 bufInfo.push_back(newBufInfo);
2412 }
2413
2414 // Free first half of it.
2415 {
2416 const size_t buffersToDelete = bufInfo.size() / 2;
2417 for(size_t i = 0; i < buffersToDelete; ++i)
2418 {
2419 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2420 }
2421 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2422 }
2423
2424        // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002425        // This way we make sure the ring buffer wraps around, with its front now in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002426 res = VK_SUCCESS;
2427 for(size_t i = 0; res == VK_SUCCESS; ++i)
2428 {
2429 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2430
2431 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2432
2433 BufferInfo newBufInfo;
2434 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2435 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2436 if(res == VK_SUCCESS)
2437 bufInfo.push_back(newBufInfo);
2438 }
2439
2440 VkDeviceSize firstNewOffset;
2441 {
2442 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2443
2444 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2445 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2446 bufCreateInfo.size = bufSizeMax;
2447
2448 BufferInfo newBufInfo;
2449 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2450 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002451 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002452 bufInfo.push_back(newBufInfo);
2453 firstNewOffset = allocInfo.offset;
2454
2455 // Make sure at least one buffer from the beginning became lost.
2456 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002457 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002458 }
2459
2460        // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with them as well.
2461 size_t newCount = 1;
2462 for(;;)
2463 {
2464 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2465
2466 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2467
2468 BufferInfo newBufInfo;
2469 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2470 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002471 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002472 bufInfo.push_back(newBufInfo);
2473 ++newCount;
2474 if(allocInfo.offset < firstNewOffset)
2475 break;
2476 }
2477
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002478 // Delete buffers that are lost.
2479 for(size_t i = bufInfo.size(); i--; )
2480 {
2481 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2482 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2483 {
2484 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2485 bufInfo.erase(bufInfo.begin() + i);
2486 }
2487 }
2488
2489 // Test vmaMakePoolAllocationsLost
2490 {
2491 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2492
2493 size_t lostAllocCount = SIZE_MAX;
2494 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002495 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002496
2497 size_t realLostAllocCount = 0;
2498 for(size_t i = 0; i < bufInfo.size(); ++i)
2499 {
2500 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2501 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2502 ++realLostAllocCount;
2503 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002504 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002505 }
2506
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002507 // Destroy all the buffers in forward order.
2508 for(size_t i = 0; i < bufInfo.size(); ++i)
2509 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2510 bufInfo.clear();
2511 }
2512
Adam Sawicki70a683e2018-08-24 15:36:32 +02002513 vmaDestroyPool(g_hAllocator, pool);
2514}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002515
Adam Sawicki70a683e2018-08-24 15:36:32 +02002516static void TestLinearAllocatorMultiBlock()
2517{
2518 wprintf(L"Test linear allocator multi block\n");
2519
2520 RandomNumberGenerator rand{345673};
2521
2522 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2523 sampleBufCreateInfo.size = 1024 * 1024;
2524 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2525
2526 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2527 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2528
2529 VmaPoolCreateInfo poolCreateInfo = {};
2530 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2531 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002532 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002533
2534 VmaPool pool = nullptr;
2535 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002536 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002537
2538 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2539
2540 VmaAllocationCreateInfo allocCreateInfo = {};
2541 allocCreateInfo.pool = pool;
2542
2543 std::vector<BufferInfo> bufInfo;
2544 VmaAllocationInfo allocInfo;
2545
2546 // Test one-time free.
2547 {
2548 // Allocate buffers until we move to a second block.
2549 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2550 for(uint32_t i = 0; ; ++i)
2551 {
2552 BufferInfo newBufInfo;
2553 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2554 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002555 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002556 bufInfo.push_back(newBufInfo);
2557 if(lastMem && allocInfo.deviceMemory != lastMem)
2558 {
2559 break;
2560 }
2561 lastMem = allocInfo.deviceMemory;
2562 }
2563
Adam Sawickib8d34d52018-10-03 17:41:20 +02002564 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002565
2566        // Make sure that the pool now has two blocks.
2567 VmaPoolStats poolStats = {};
2568 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002569 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002570
2571 // Destroy all the buffers in random order.
2572 while(!bufInfo.empty())
2573 {
2574 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2575 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2576 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2577 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2578 }
2579
2580        // Make sure that the pool now has at most one block.
2581 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002582 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002583 }
2584
2585 // Test stack.
2586 {
2587 // Allocate buffers until we move to a second block.
2588 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2589 for(uint32_t i = 0; ; ++i)
2590 {
2591 BufferInfo newBufInfo;
2592 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2593 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002594 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002595 bufInfo.push_back(newBufInfo);
2596 if(lastMem && allocInfo.deviceMemory != lastMem)
2597 {
2598 break;
2599 }
2600 lastMem = allocInfo.deviceMemory;
2601 }
2602
Adam Sawickib8d34d52018-10-03 17:41:20 +02002603 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002604
2605        // Add a few more buffers.
2606 for(uint32_t i = 0; i < 5; ++i)
2607 {
2608 BufferInfo newBufInfo;
2609 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2610 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002611 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002612 bufInfo.push_back(newBufInfo);
2613 }
2614
2615        // Make sure that the pool now has two blocks.
2616 VmaPoolStats poolStats = {};
2617 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002618 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002619
2620 // Delete half of buffers, LIFO.
2621 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2622 {
2623 const BufferInfo& currBufInfo = bufInfo.back();
2624 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2625 bufInfo.pop_back();
2626 }
2627
2628 // Add one more buffer.
2629 BufferInfo newBufInfo;
2630 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2631 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002632 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002633 bufInfo.push_back(newBufInfo);
2634
2635        // Make sure that the pool now has one block.
2636 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002637 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002638
2639 // Delete all the remaining buffers, LIFO.
2640 while(!bufInfo.empty())
2641 {
2642 const BufferInfo& currBufInfo = bufInfo.back();
2643 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2644 bufInfo.pop_back();
2645 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002646 }
2647
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002648 vmaDestroyPool(g_hAllocator, pool);
2649}
2650
Adam Sawickifd11d752018-08-22 15:02:10 +02002651static void ManuallyTestLinearAllocator()
2652{
2653 VmaStats origStats;
2654 vmaCalculateStats(g_hAllocator, &origStats);
2655
2656 wprintf(L"Manually test linear allocator\n");
2657
2658 RandomNumberGenerator rand{645332};
2659
2660 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2661 sampleBufCreateInfo.size = 1024; // Whatever.
2662 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2663
2664 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2665 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2666
2667 VmaPoolCreateInfo poolCreateInfo = {};
2668 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002670
2671 poolCreateInfo.blockSize = 10 * 1024;
2672 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2673 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2674
2675 VmaPool pool = nullptr;
2676 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002677 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002678
2679 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2680
2681 VmaAllocationCreateInfo allocCreateInfo = {};
2682 allocCreateInfo.pool = pool;
2683
2684 std::vector<BufferInfo> bufInfo;
2685 VmaAllocationInfo allocInfo;
2686 BufferInfo newBufInfo;
2687
2688 // Test double stack.
2689 {
2690 /*
2691 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2692 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2693
2694        In total:
2695 1 block allocated
2696 10240 Vulkan bytes
2697 6 new allocations
2698 2256 bytes in allocations
2699 */
2700
2701 bufCreateInfo.size = 32;
2702 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2703 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002704 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002705 bufInfo.push_back(newBufInfo);
2706
2707 bufCreateInfo.size = 1024;
2708 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2709 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002710 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002711 bufInfo.push_back(newBufInfo);
2712
2713 bufCreateInfo.size = 32;
2714 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2715 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002716 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002717 bufInfo.push_back(newBufInfo);
2718
2719 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2720
2721 bufCreateInfo.size = 128;
2722 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2723 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002724 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002725 bufInfo.push_back(newBufInfo);
2726
2727 bufCreateInfo.size = 1024;
2728 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2729 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002730 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002731 bufInfo.push_back(newBufInfo);
2732
2733 bufCreateInfo.size = 16;
2734 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2735 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002737 bufInfo.push_back(newBufInfo);
2738
2739 VmaStats currStats;
2740 vmaCalculateStats(g_hAllocator, &currStats);
2741 VmaPoolStats poolStats;
2742 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2743
2744 char* statsStr = nullptr;
2745 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2746
2747 // PUT BREAKPOINT HERE TO CHECK.
2748 // Inspect: currStats versus origStats, poolStats, statsStr.
2749 int I = 0;
2750
2751 vmaFreeStatsString(g_hAllocator, statsStr);
2752
2753 // Destroy the buffers in reverse order.
2754 while(!bufInfo.empty())
2755 {
2756 const BufferInfo& currBufInfo = bufInfo.back();
2757 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2758 bufInfo.pop_back();
2759 }
2760 }
2761
2762 vmaDestroyPool(g_hAllocator, pool);
2763}
2764
Adam Sawicki80927152018-09-07 17:27:23 +02002765static void BenchmarkAlgorithmsCase(FILE* file,
2766 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002767 bool empty,
2768 VmaAllocationCreateFlags allocStrategy,
2769 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002770{
2771 RandomNumberGenerator rand{16223};
2772
2773 const VkDeviceSize bufSizeMin = 32;
2774 const VkDeviceSize bufSizeMax = 1024;
2775 const size_t maxBufCapacity = 10000;
2776 const uint32_t iterationCount = 10;
2777
2778 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2779 sampleBufCreateInfo.size = bufSizeMax;
2780 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2781
2782 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2783 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2784
2785 VmaPoolCreateInfo poolCreateInfo = {};
2786 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002787 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002788
2789 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002790 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002791 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2792
2793 VmaPool pool = nullptr;
2794 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002795 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002796
2797 // Buffer created just to get memory requirements. Never bound to any memory.
2798 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2799 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002800 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002801
2802 VkMemoryRequirements memReq = {};
2803 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2804
2805 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2806
2807 VmaAllocationCreateInfo allocCreateInfo = {};
2808 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002809 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002810
2811 VmaAllocation alloc;
2812 std::vector<VmaAllocation> baseAllocations;
2813
2814 if(!empty)
2815 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002816 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002817 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002818 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002819 {
2820 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2821 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002822 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002823 baseAllocations.push_back(alloc);
2824 totalSize += memReq.size;
2825 }
2826
2827 // Delete half of them, choose randomly.
2828 size_t allocsToDelete = baseAllocations.size() / 2;
2829 for(size_t i = 0; i < allocsToDelete; ++i)
2830 {
2831 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2832 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2833 baseAllocations.erase(baseAllocations.begin() + index);
2834 }
2835 }
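    // When `empty` is false, the pool is pre-filled to about 1/3 of its size and half of those
    // allocations are then freed at random, so the benchmark below runs against a fragmented pool.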
2836
2837 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002838 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002839 std::vector<VmaAllocation> testAllocations;
2840 testAllocations.reserve(allocCount);
2841 duration allocTotalDuration = duration::zero();
2842 duration freeTotalDuration = duration::zero();
2843 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2844 {
2845 // Allocations
2846 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2847 for(size_t i = 0; i < allocCount; ++i)
2848 {
2849 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2850 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002851 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002852 testAllocations.push_back(alloc);
2853 }
2854 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2855
2856 // Deallocations
2857 switch(freeOrder)
2858 {
2859 case FREE_ORDER::FORWARD:
2860 // Leave testAllocations unchanged.
2861 break;
2862 case FREE_ORDER::BACKWARD:
2863 std::reverse(testAllocations.begin(), testAllocations.end());
2864 break;
2865 case FREE_ORDER::RANDOM:
2866 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2867 break;
2868 default: assert(0);
2869 }
2870
2871 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2872 for(size_t i = 0; i < allocCount; ++i)
2873 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2874 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2875
2876 testAllocations.clear();
2877 }
2878
2879 // Delete baseAllocations
2880 while(!baseAllocations.empty())
2881 {
2882 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2883 baseAllocations.pop_back();
2884 }
2885
2886 vmaDestroyPool(g_hAllocator, pool);
2887
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002888 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2889 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2890
Adam Sawicki80927152018-09-07 17:27:23 +02002891 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2892 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002893 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002894 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002895 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002896 allocTotalSeconds,
2897 freeTotalSeconds);
2898
2899 if(file)
2900 {
2901 std::string currTime;
2902 CurrentTimeToStr(currTime);
2903
Adam Sawicki80927152018-09-07 17:27:23 +02002904 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002905 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002906 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002907 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002908 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002909 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2910 allocTotalSeconds,
2911 freeTotalSeconds);
2912 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002913}
2914
Adam Sawicki80927152018-09-07 17:27:23 +02002915static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002916{
Adam Sawicki80927152018-09-07 17:27:23 +02002917 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002918
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002919 if(file)
2920 {
2921 fprintf(file,
2922 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002923 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002924 "Allocation time (s),Deallocation time (s)\n");
2925 }
2926
Adam Sawicki0a607132018-08-24 11:18:41 +02002927 uint32_t freeOrderCount = 1;
2928 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2929 freeOrderCount = 3;
2930 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2931 freeOrderCount = 2;
2932
2933 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002934 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002935
2936 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2937 {
2938 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2939 switch(freeOrderIndex)
2940 {
2941 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2942 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2943 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2944 default: assert(0);
2945 }
2946
2947 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2948 {
Adam Sawicki80927152018-09-07 17:27:23 +02002949 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002950 {
Adam Sawicki80927152018-09-07 17:27:23 +02002951 uint32_t algorithm = 0;
2952 switch(algorithmIndex)
2953 {
2954 case 0:
2955 break;
2956 case 1:
2957 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2958 break;
2959 case 2:
2960 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2961 break;
2962 default:
2963 assert(0);
2964 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002965
Adam Sawicki80927152018-09-07 17:27:23 +02002966 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002967 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2968 {
2969 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002970 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002971 {
2972 switch(allocStrategyIndex)
2973 {
2974 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2975 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2976 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2977 default: assert(0);
2978 }
2979 }
2980
Adam Sawicki80927152018-09-07 17:27:23 +02002981 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002982 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002983 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002984 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002985 strategy,
2986 freeOrder); // freeOrder
2987 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002988 }
2989 }
2990 }
2991}
2992
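// Exercises a custom VmaPool filled with equally sized buffers: failure when the pool is full,
// allocations becoming lost across frames (CAN_BECOME_LOST / CAN_MAKE_OTHER_LOST), pool
// statistics, vmaDefragment, vmaMakePoolAllocationsLost, and rejection of a single allocation
// larger than blockSize.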
Adam Sawickib8333fb2018-03-13 16:15:53 +01002993static void TestPool_SameSize()
2994{
2995 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2996 const size_t BUF_COUNT = 100;
2997 VkResult res;
2998
2999 RandomNumberGenerator rand{123};
3000
3001 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3002 bufferInfo.size = BUF_SIZE;
3003 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3004
3005 uint32_t memoryTypeBits = UINT32_MAX;
3006 {
3007 VkBuffer dummyBuffer;
3008 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003009 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003010
3011 VkMemoryRequirements memReq;
3012 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3013 memoryTypeBits = memReq.memoryTypeBits;
3014
3015 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3016 }
3017
3018 VmaAllocationCreateInfo poolAllocInfo = {};
3019 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3020 uint32_t memTypeIndex;
3021 res = vmaFindMemoryTypeIndex(
3022 g_hAllocator,
3023 memoryTypeBits,
3024 &poolAllocInfo,
3025 &memTypeIndex);
    TEST(res == VK_SUCCESS);
3026
3027 VmaPoolCreateInfo poolCreateInfo = {};
3028 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3029 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3030 poolCreateInfo.minBlockCount = 1;
3031 poolCreateInfo.maxBlockCount = 4;
3032 poolCreateInfo.frameInUseCount = 0;
3033
3034 VmaPool pool;
3035 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003036 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003037
3038 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3039
3040 VmaAllocationCreateInfo allocInfo = {};
3041 allocInfo.pool = pool;
3042 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3043 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3044
3045 struct BufItem
3046 {
3047 VkBuffer Buf;
3048 VmaAllocation Alloc;
3049 };
3050 std::vector<BufItem> items;
3051
3052 // Fill entire pool.
3053 for(size_t i = 0; i < BUF_COUNT; ++i)
3054 {
3055 BufItem item;
3056 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003057 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003058 items.push_back(item);
3059 }
3060
3061 // Make sure that another allocation would fail.
3062 {
3063 BufItem item;
3064 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003065 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003066 }
3067
3068 // Validate that no buffer is lost. Also check that they are not mapped.
3069 for(size_t i = 0; i < items.size(); ++i)
3070 {
3071 VmaAllocationInfo allocInfo;
3072 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003073 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3074 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003075 }
3076
3077 // Free some percent of random items.
3078 {
3079 const size_t PERCENT_TO_FREE = 10;
3080 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3081 for(size_t i = 0; i < itemsToFree; ++i)
3082 {
3083 size_t index = (size_t)rand.Generate() % items.size();
3084 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3085 items.erase(items.begin() + index);
3086 }
3087 }
3088
3089 // Randomly allocate and free items.
3090 {
3091 const size_t OPERATION_COUNT = BUF_COUNT;
3092 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3093 {
3094 bool allocate = rand.Generate() % 2 != 0;
3095 if(allocate)
3096 {
3097 if(items.size() < BUF_COUNT)
3098 {
3099 BufItem item;
3100 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003101 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003102 items.push_back(item);
3103 }
3104 }
3105 else // Free
3106 {
3107 if(!items.empty())
3108 {
3109 size_t index = (size_t)rand.Generate() % items.size();
3110 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3111 items.erase(items.begin() + index);
3112 }
3113 }
3114 }
3115 }
3116
3117 // Allocate up to maximum.
3118 while(items.size() < BUF_COUNT)
3119 {
3120 BufItem item;
3121 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003122 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003123 items.push_back(item);
3124 }
3125
3126 // Validate that no buffer is lost.
3127 for(size_t i = 0; i < items.size(); ++i)
3128 {
3129 VmaAllocationInfo allocInfo;
3130 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003131 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003132 }
3133
3134 // Next frame.
3135 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3136
3137 // Allocate another BUF_COUNT buffers.
3138 for(size_t i = 0; i < BUF_COUNT; ++i)
3139 {
3140 BufItem item;
3141 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003142 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003143 items.push_back(item);
3144 }
3145
3146 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3147 for(size_t i = 0; i < BUF_COUNT; ++i)
3148 {
3149 VmaAllocationInfo allocInfo;
3150 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003151 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003152 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3153 }
3154 items.erase(items.begin(), items.begin() + BUF_COUNT);
3155
3156 // Validate that no buffer is lost.
3157 for(size_t i = 0; i < items.size(); ++i)
3158 {
3159 VmaAllocationInfo allocInfo;
3160 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003161 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003162 }
3163
3164 // Free one item.
3165 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3166 items.pop_back();
3167
3168 // Validate statistics.
3169 {
3170 VmaPoolStats poolStats = {};
3171 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003172 TEST(poolStats.allocationCount == items.size());
3173 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3174 TEST(poolStats.unusedRangeCount == 1);
3175 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3176 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003177 }
3178
3179 // Free all remaining items.
3180 for(size_t i = items.size(); i--; )
3181 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3182 items.clear();
3183
3184 // Allocate maximum items again.
3185 for(size_t i = 0; i < BUF_COUNT; ++i)
3186 {
3187 BufItem item;
3188 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003189 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003190 items.push_back(item);
3191 }
3192
3193 // Delete every other item.
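    // Erasing at index i and then incrementing i skips one element each iteration,
    // so every other buffer stays alive and free gaps are left between them.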
3194 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3195 {
3196 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3197 items.erase(items.begin() + i);
3198 }
3199
3200 // Defragment!
3201 {
3202 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3203 for(size_t i = 0; i < items.size(); ++i)
3204 allocationsToDefragment[i] = items[i].Alloc;
3205
3206 VmaDefragmentationStats defragmentationStats;
3207 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003208 TEST(res == VK_SUCCESS);
3209 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003210 }
3211
3212 // Free all remaining items.
3213 for(size_t i = items.size(); i--; )
3214 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3215 items.clear();
3216
3217 ////////////////////////////////////////////////////////////////////////////////
3218 // Test for vmaMakePoolAllocationsLost
3219
3220 // Allocate 4 buffers on frame 10.
3221 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3222 for(size_t i = 0; i < 4; ++i)
3223 {
3224 BufItem item;
3225 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003226 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003227 items.push_back(item);
3228 }
3229
3230 // Touch first 2 of them on frame 11.
3231 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3232 for(size_t i = 0; i < 2; ++i)
3233 {
3234 VmaAllocationInfo allocInfo;
3235 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3236 }
3237
3238 // Call vmaMakePoolAllocationsLost. Only the 2 allocations not touched in this frame should become lost.
3239 size_t lostCount = 0xDEADC0DE;
3240 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003241 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003242
3243 // Make another call. Now 0 should be lost.
3244 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003245 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003246
3247 // Make another call, with null count. Should not crash.
3248 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3249
3250 // END: Free all remaining items.
3251 for(size_t i = items.size(); i--; )
3252 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3253
3254 items.clear();
3255
Adam Sawickid2924172018-06-11 12:48:46 +02003256 ////////////////////////////////////////////////////////////////////////////////
3257 // Test for allocation too large for pool
3258
3259 {
3260 VmaAllocationCreateInfo allocCreateInfo = {};
3261 allocCreateInfo.pool = pool;
3262
3263 VkMemoryRequirements memReq;
3264 memReq.memoryTypeBits = UINT32_MAX;
3265 memReq.alignment = 1;
3266 memReq.size = poolCreateInfo.blockSize + 4;
3267
3268 VmaAllocation alloc = nullptr;
3269 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003270 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003271 }
3272
Adam Sawickib8333fb2018-03-13 16:15:53 +01003273 vmaDestroyPool(g_hAllocator, pool);
3274}
3275
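// Exercises vmaResizeAllocation on allocations packed into a single 8 MB block: shrinking and
// growing in place, expected VK_ERROR_OUT_OF_POOL_MEMORY when the neighboring allocation or the
// end of the block is in the way, and resizing a dedicated allocation (only the same size succeeds).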
Adam Sawickib0c36362018-11-13 16:17:38 +01003276static void TestResize()
3277{
3278 wprintf(L"Testing vmaResizeAllocation...\n");
3279
3280 const VkDeviceSize KILOBYTE = 1024ull;
3281 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3282
3283 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3284 bufCreateInfo.size = 2 * MEGABYTE;
3285 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3286
3287 VmaAllocationCreateInfo allocCreateInfo = {};
3288 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3289
3290 uint32_t memTypeIndex = UINT32_MAX;
3291 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3292
3293 VmaPoolCreateInfo poolCreateInfo = {};
3294 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3295 poolCreateInfo.blockSize = 8 * MEGABYTE;
3296 poolCreateInfo.minBlockCount = 1;
3297 poolCreateInfo.maxBlockCount = 1;
3298 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3299
3300 VmaPool pool;
3301 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3302
3303 allocCreateInfo.pool = pool;
3304
3305 // Fill 8 MB pool with 4 * 2 MB allocations.
3306 VmaAllocation allocs[4] = {};
3307
3308 VkMemoryRequirements memReq = {};
3309 memReq.memoryTypeBits = UINT32_MAX;
3310 memReq.alignment = 4;
3311 memReq.size = bufCreateInfo.size;
3312
3313 VmaAllocationInfo allocInfo = {};
3314
3315 for(uint32_t i = 0; i < 4; ++i)
3316 {
3317 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3318 }
3319
3320 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3321
3322 // Case: Resize to the same size always succeeds.
3323 {
3324 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3325 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3326 TEST(allocInfo.size == 2ull * 1024 * 1024);
3327 }
3328
3329 // Case: Shrink allocation at the end.
3330 {
3331 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3332 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3333 TEST(allocInfo.size == 1ull * 1024 * 1024);
3334 }
3335
3336 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3337
3338 // Case: Shrink allocation before free space.
3339 {
3340 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3341 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3342 TEST(allocInfo.size == 512 * KILOBYTE);
3343 }
3344
3345 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3346
3347 // Case: Shrink allocation before next allocation.
3348 {
3349 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3350 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3351 TEST(allocInfo.size == 1 * MEGABYTE);
3352 }
3353
3354 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3355
3356 // Case: Grow allocation while there is even more space available.
3357 {
3358 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3359 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3360 TEST(allocInfo.size == 1 * MEGABYTE);
3361 }
3362
3363 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3364
3365 // Case: Grow allocation while there is exact amount of free space available.
3366 {
3367 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3368 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3369 TEST(allocInfo.size == 2 * MEGABYTE);
3370 }
3371
3372 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3373
3374 // Case: Fail to grow when there is not enough free space due to next allocation.
3375 {
3376 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3377 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3378 TEST(allocInfo.size == 2 * MEGABYTE);
3379 }
3380
3381 // Case: Fail to grow when there is not enough free space due to end of memory block.
3382 {
3383 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3384 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3385 TEST(allocInfo.size == 1 * MEGABYTE);
3386 }
3387
3388 for(uint32_t i = 4; i--; )
3389 {
3390 vmaFreeMemory(g_hAllocator, allocs[i]);
3391 }
3392
3393 vmaDestroyPool(g_hAllocator, pool);
3394
3395 // Test dedicated allocation
3396 {
3397 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3398 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3399 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3400
3401 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3402 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3403
3404 // Case: Resize to the same size always succeeds.
3405 {
3406 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3407 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3408 TEST(allocInfo.size == 2ull * 1024 * 1024);
3409 }
3410
3411 // Case: Shrinking fails.
3412 {
3413 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3414 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3415 TEST(allocInfo.size == 2ull * 1024 * 1024);
3416 }
3417
3418 // Case: Growing fails.
3419 {
3420 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3421 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3422 TEST(allocInfo.size == 2ull * 1024 * 1024);
3423 }
3424
3425 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3426 }
3427}
3428
Adam Sawickie44c6262018-06-15 14:30:39 +02003429static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3430{
3431 const uint8_t* pBytes = (const uint8_t*)pMemory;
3432 for(size_t i = 0; i < size; ++i)
3433 {
3434 if(pBytes[i] != pattern)
3435 {
3436 return false;
3437 }
3438 }
3439 return true;
3440}
3441
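// Verifies debug initialization of allocation contents: freshly created allocations are expected
// to be filled with 0xDC and freed ones with 0xEF. This assumes the library is built with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled; without it the pattern checks below would fail.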
3442static void TestAllocationsInitialization()
3443{
3444 VkResult res;
3445
3446 const size_t BUF_SIZE = 1024;
3447
3448 // Create pool.
3449
3450 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3451 bufInfo.size = BUF_SIZE;
3452 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3453
3454 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3455 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3456
3457 VmaPoolCreateInfo poolCreateInfo = {};
3458 poolCreateInfo.blockSize = BUF_SIZE * 10;
3459 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3460 poolCreateInfo.maxBlockCount = 1;
3461 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003462 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003463
3464 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3465 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003466 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003467
3468 // Create one persistently mapped buffer to keep the memory of this block mapped,
3469 // so that the pointer to mapped data remains (more or less...) valid even
3470 // after destruction of the other allocations.
3471
3472 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3473 VkBuffer firstBuf;
3474 VmaAllocation firstAlloc;
3475 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003476 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003477
3478 // Test buffers.
3479
3480 for(uint32_t i = 0; i < 2; ++i)
3481 {
3482 const bool persistentlyMapped = i == 0;
3483 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3484 VkBuffer buf;
3485 VmaAllocation alloc;
3486 VmaAllocationInfo allocInfo;
3487 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003488 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003489
3490 void* pMappedData;
3491 if(!persistentlyMapped)
3492 {
3493 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003494 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003495 }
3496 else
3497 {
3498 pMappedData = allocInfo.pMappedData;
3499 }
3500
3501 // Validate initialized content
3502 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003503 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003504
3505 if(!persistentlyMapped)
3506 {
3507 vmaUnmapMemory(g_hAllocator, alloc);
3508 }
3509
3510 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3511
3512 // Validate freed content
3513 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003514 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003515 }
3516
3517 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3518 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3519}
3520
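// Multithreaded pool benchmark: config.ThreadCount worker threads each simulate config.FrameCount
// frames, keeping a shifting working set of buffers/images allocated from one VmaPool with
// CAN_BECOME_LOST allocations. Per-thread allocation/deallocation timings and lost/failed counts
// are merged into outResult at the end.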
Adam Sawickib8333fb2018-03-13 16:15:53 +01003521static void TestPool_Benchmark(
3522 PoolTestResult& outResult,
3523 const PoolTestConfig& config)
3524{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003525 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003526
3527 RandomNumberGenerator mainRand{config.RandSeed};
3528
3529 uint32_t allocationSizeProbabilitySum = std::accumulate(
3530 config.AllocationSizes.begin(),
3531 config.AllocationSizes.end(),
3532 0u,
3533 [](uint32_t sum, const AllocationSize& allocSize) {
3534 return sum + allocSize.Probability;
3535 });
3536
3537 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3538 bufferInfo.size = 256; // Whatever.
3539 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3540
3541 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3542 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3543 imageInfo.extent.width = 256; // Whatever.
3544 imageInfo.extent.height = 256; // Whatever.
3545 imageInfo.extent.depth = 1;
3546 imageInfo.mipLevels = 1;
3547 imageInfo.arrayLayers = 1;
3548 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3549 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3550 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3551 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3552 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3553
3554 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3555 {
3556 VkBuffer dummyBuffer;
3557 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003558 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003559
3560 VkMemoryRequirements memReq;
3561 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3562 bufferMemoryTypeBits = memReq.memoryTypeBits;
3563
3564 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3565 }
3566
3567 uint32_t imageMemoryTypeBits = UINT32_MAX;
3568 {
3569 VkImage dummyImage;
3570 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003571 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003572
3573 VkMemoryRequirements memReq;
3574 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3575 imageMemoryTypeBits = memReq.memoryTypeBits;
3576
3577 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3578 }
3579
3580 uint32_t memoryTypeBits = 0;
3581 if(config.UsesBuffers() && config.UsesImages())
3582 {
3583 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3584 if(memoryTypeBits == 0)
3585 {
3586 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3587 return;
3588 }
3589 }
3590 else if(config.UsesBuffers())
3591 memoryTypeBits = bufferMemoryTypeBits;
3592 else if(config.UsesImages())
3593 memoryTypeBits = imageMemoryTypeBits;
3594 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003595 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003596
3597 VmaPoolCreateInfo poolCreateInfo = {};
3598 poolCreateInfo.memoryTypeIndex = 0;
3599 poolCreateInfo.minBlockCount = 1;
3600 poolCreateInfo.maxBlockCount = 1;
3601 poolCreateInfo.blockSize = config.PoolSize;
3602 poolCreateInfo.frameInUseCount = 1;
3603
3604 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3605 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3606 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3607
3608 VmaPool pool;
3609 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003610 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003611
3612 // Start time measurement - after creating pool and initializing data structures.
3613 time_point timeBeg = std::chrono::high_resolution_clock::now();
3614
3615 ////////////////////////////////////////////////////////////////////////////////
3616 // ThreadProc
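    // Each thread pre-generates its item sizes, then for every frame: waits for frameStartEvent,
    // reshuffles which items are "used", touches or (re)creates their allocations while recording
    // timings and lost/failed statistics, and finally signals frameEndEvent.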
3617 auto ThreadProc = [&](
3618 PoolTestThreadResult* outThreadResult,
3619 uint32_t randSeed,
3620 HANDLE frameStartEvent,
3621 HANDLE frameEndEvent) -> void
3622 {
3623 RandomNumberGenerator threadRand{randSeed};
3624
3625 outThreadResult->AllocationTimeMin = duration::max();
3626 outThreadResult->AllocationTimeSum = duration::zero();
3627 outThreadResult->AllocationTimeMax = duration::min();
3628 outThreadResult->DeallocationTimeMin = duration::max();
3629 outThreadResult->DeallocationTimeSum = duration::zero();
3630 outThreadResult->DeallocationTimeMax = duration::min();
3631 outThreadResult->AllocationCount = 0;
3632 outThreadResult->DeallocationCount = 0;
3633 outThreadResult->LostAllocationCount = 0;
3634 outThreadResult->LostAllocationTotalSize = 0;
3635 outThreadResult->FailedAllocationCount = 0;
3636 outThreadResult->FailedAllocationTotalSize = 0;
3637
3638 struct Item
3639 {
3640 VkDeviceSize BufferSize;
3641 VkExtent2D ImageSize;
3642 VkBuffer Buf;
3643 VkImage Image;
3644 VmaAllocation Alloc;
3645
3646 VkDeviceSize CalcSizeBytes() const
3647 {
3648 return BufferSize +
3649 ImageSize.width * ImageSize.height * 4;
3650 }
3651 };
3652 std::vector<Item> unusedItems, usedItems;
3653
3654 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3655
3656 // Create all items - all unused, not yet allocated.
3657 for(size_t i = 0; i < threadTotalItemCount; ++i)
3658 {
3659 Item item = {};
3660
3661 uint32_t allocSizeIndex = 0;
3662 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3663 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3664 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3665
3666 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3667 if(allocSize.BufferSizeMax > 0)
3668 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003669 TEST(allocSize.BufferSizeMin > 0);
3670 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003671 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3672 item.BufferSize = allocSize.BufferSizeMin;
3673 else
3674 {
3675 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3676 item.BufferSize = item.BufferSize / 16 * 16;
3677 }
3678 }
3679 else
3680 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003681 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003682 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3683 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3684 else
3685 {
3686 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3687 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3688 }
3689 }
3690
3691 unusedItems.push_back(item);
3692 }
3693
3694 auto Allocate = [&](Item& item) -> VkResult
3695 {
3696 VmaAllocationCreateInfo allocCreateInfo = {};
3697 allocCreateInfo.pool = pool;
3698 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3699 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3700
3701 if(item.BufferSize)
3702 {
3703 bufferInfo.size = item.BufferSize;
3704 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3705 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3706 }
3707 else
3708 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003709 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003710
3711 imageInfo.extent.width = item.ImageSize.width;
3712 imageInfo.extent.height = item.ImageSize.height;
3713 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3714 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3715 }
3716 };
3717
3718 ////////////////////////////////////////////////////////////////////////////////
3719 // Frames
3720 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3721 {
3722 WaitForSingleObject(frameStartEvent, INFINITE);
3723
3724 // Always make some percent of used bufs unused, to choose different used ones.
3725 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3726 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3727 {
3728 size_t index = threadRand.Generate() % usedItems.size();
3729 unusedItems.push_back(usedItems[index]);
3730 usedItems.erase(usedItems.begin() + index);
3731 }
3732
3733 // Determine which bufs we want to use in this frame.
3734 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3735 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003736 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003737 // Move some used to unused.
3738 while(usedBufCount < usedItems.size())
3739 {
3740 size_t index = threadRand.Generate() % usedItems.size();
3741 unusedItems.push_back(usedItems[index]);
3742 usedItems.erase(usedItems.begin() + index);
3743 }
3744 // Move some unused to used.
3745 while(usedBufCount > usedItems.size())
3746 {
3747 size_t index = threadRand.Generate() % unusedItems.size();
3748 usedItems.push_back(unusedItems[index]);
3749 unusedItems.erase(unusedItems.begin() + index);
3750 }
3751
3752 uint32_t touchExistingCount = 0;
3753 uint32_t touchLostCount = 0;
3754 uint32_t createSucceededCount = 0;
3755 uint32_t createFailedCount = 0;
3756
3757 // Touch all used bufs. Allocate those not yet created, and recreate those found to be lost.
3758 for(size_t i = 0; i < usedItems.size(); ++i)
3759 {
3760 Item& item = usedItems[i];
3761 // Not yet created.
3762 if(item.Alloc == VK_NULL_HANDLE)
3763 {
3764 res = Allocate(item);
3765 ++outThreadResult->AllocationCount;
3766 if(res != VK_SUCCESS)
3767 {
3768 item.Alloc = VK_NULL_HANDLE;
3769 item.Buf = VK_NULL_HANDLE;
3770 ++outThreadResult->FailedAllocationCount;
3771 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3772 ++createFailedCount;
3773 }
3774 else
3775 ++createSucceededCount;
3776 }
3777 else
3778 {
3779 // Touch.
3780 VmaAllocationInfo allocInfo;
3781 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3782 // Lost.
3783 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3784 {
3785 ++touchLostCount;
3786
3787 // Destroy.
3788 {
3789 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3790 if(item.Buf)
3791 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3792 else
3793 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3794 ++outThreadResult->DeallocationCount;
3795 }
3796 item.Alloc = VK_NULL_HANDLE;
3797 item.Buf = VK_NULL_HANDLE;
3798
3799 ++outThreadResult->LostAllocationCount;
3800 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3801
3802 // Recreate.
3803 res = Allocate(item);
3804 ++outThreadResult->AllocationCount;
3805 // Creation failed.
3806 if(res != VK_SUCCESS)
3807 {
3808 ++outThreadResult->FailedAllocationCount;
3809 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3810 ++createFailedCount;
3811 }
3812 else
3813 ++createSucceededCount;
3814 }
3815 else
3816 ++touchExistingCount;
3817 }
3818 }
3819
3820 /*
3821 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3822 randSeed, frameIndex,
3823 touchExistingCount, touchLostCount,
3824 createSucceededCount, createFailedCount);
3825 */
3826
3827 SetEvent(frameEndEvent);
3828 }
3829
3830 // Free all remaining items.
3831 for(size_t i = usedItems.size(); i--; )
3832 {
3833 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3834 if(usedItems[i].Buf)
3835 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3836 else
3837 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3838 ++outThreadResult->DeallocationCount;
3839 }
3840 for(size_t i = unusedItems.size(); i--; )
3841 {
3842 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3843 if(unusedItems[i].Buf)
3844 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3845 else
3846 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3847 ++outThreadResult->DeallocationCount;
3848 }
3849 };
3850
3851 // Launch threads.
3852 uint32_t threadRandSeed = mainRand.Generate();
3853 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3854 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3855 std::vector<std::thread> bkgThreads;
3856 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3857 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3858 {
3859 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3860 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3861 bkgThreads.emplace_back(std::bind(
3862 ThreadProc,
3863 &threadResults[threadIndex],
3864 threadRandSeed + threadIndex,
3865 frameStartEvents[threadIndex],
3866 frameEndEvents[threadIndex]));
3867 }
3868
3869 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003870 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003871 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3872 {
3873 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3874 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3875 SetEvent(frameStartEvents[threadIndex]);
3876 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3877 }
3878
3879 // Wait for threads to finish.
3880 for(size_t i = 0; i < bkgThreads.size(); ++i)
3881 {
3882 bkgThreads[i].join();
3883 CloseHandle(frameEndEvents[i]);
3884 CloseHandle(frameStartEvents[i]);
3885 }
3886 bkgThreads.clear();
3887
3888 // Finish time measurement - before destroying pool.
3889 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3890
3891 vmaDestroyPool(g_hAllocator, pool);
3892
3893 outResult.AllocationTimeMin = duration::max();
3894 outResult.AllocationTimeAvg = duration::zero();
3895 outResult.AllocationTimeMax = duration::min();
3896 outResult.DeallocationTimeMin = duration::max();
3897 outResult.DeallocationTimeAvg = duration::zero();
3898 outResult.DeallocationTimeMax = duration::min();
3899 outResult.LostAllocationCount = 0;
3900 outResult.LostAllocationTotalSize = 0;
3901 outResult.FailedAllocationCount = 0;
3902 outResult.FailedAllocationTotalSize = 0;
3903 size_t allocationCount = 0;
3904 size_t deallocationCount = 0;
3905 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3906 {
3907 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3908 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3909 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3910 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3911 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3912 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3913 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3914 allocationCount += threadResult.AllocationCount;
3915 deallocationCount += threadResult.DeallocationCount;
3916 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3917 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3918 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3919 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3920 }
3921 if(allocationCount)
3922 outResult.AllocationTimeAvg /= allocationCount;
3923 if(deallocationCount)
3924 outResult.DeallocationTimeAvg /= deallocationCount;
3925}
3926
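// Returns true if the byte ranges [ptr1, ptr1 + size1) and [ptr2, ptr2 + size2) overlap.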
3927static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3928{
3929 if(ptr1 < ptr2)
3930 return ptr1 + size1 > ptr2;
3931 else if(ptr2 < ptr1)
3932 return ptr2 + size2 > ptr1;
3933 else
3934 return true;
3935}
3936
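// Tests vmaMapMemory / vmaUnmapMemory reference counting and VMA_ALLOCATION_CREATE_MAPPED_BIT,
// each time for default allocations, allocations from a custom pool, and dedicated allocations.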
3937static void TestMapping()
3938{
3939 wprintf(L"Testing mapping...\n");
3940
3941 VkResult res;
3942 uint32_t memTypeIndex = UINT32_MAX;
3943
3944 enum TEST
3945 {
3946 TEST_NORMAL,
3947 TEST_POOL,
3948 TEST_DEDICATED,
3949 TEST_COUNT
3950 };
3951 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3952 {
3953 VmaPool pool = nullptr;
3954 if(testIndex == TEST_POOL)
3955 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003956 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003957 VmaPoolCreateInfo poolInfo = {};
3958 poolInfo.memoryTypeIndex = memTypeIndex;
3959 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003960 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003961 }
3962
3963 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3964 bufInfo.size = 0x10000;
3965 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3966
3967 VmaAllocationCreateInfo allocCreateInfo = {};
3968 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3969 allocCreateInfo.pool = pool;
3970 if(testIndex == TEST_DEDICATED)
3971 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3972
3973 VmaAllocationInfo allocInfo;
3974
3975 // Mapped manually
3976
3977 // Create 2 buffers.
3978 BufferInfo bufferInfos[3];
3979 for(size_t i = 0; i < 2; ++i)
3980 {
3981 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3982 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003983 TEST(res == VK_SUCCESS);
3984 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003985 memTypeIndex = allocInfo.memoryType;
3986 }
3987
3988 // Map buffer 0.
3989 char* data00 = nullptr;
3990 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003991 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003992 data00[0xFFFF] = data00[0];
3993
3994 // Map buffer 0 second time.
3995 char* data01 = nullptr;
3996 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003997 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003998
3999 // Map buffer 1.
4000 char* data1 = nullptr;
4001 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004002 TEST(res == VK_SUCCESS && data1 != nullptr);
4003 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004004 data1[0xFFFF] = data1[0];
4005
4006 // Unmap buffer 0 two times.
4007 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4008 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4009 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004010 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004011
4012 // Unmap buffer 1.
4013 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4014 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004015 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004016
4017 // Create 3rd buffer - persistently mapped.
4018 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4019 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4020 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004021 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004022
4023 // Map buffer 2.
4024 char* data2 = nullptr;
4025 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004026 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004027 data2[0xFFFF] = data2[0];
4028
4029 // Unmap buffer 2.
4030 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4031 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004032 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004033
4034 // Destroy all buffers.
4035 for(size_t i = 3; i--; )
4036 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4037
4038 vmaDestroyPool(g_hAllocator, pool);
4039 }
4040}
4041
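// Stress-tests mapping from 16 threads at once: every thread creates buffers with a random
// mapping mode (never mapped, mapped briefly, mapped until destruction, mapped twice, or
// persistently mapped), touches the memory, and verifies the pMappedData bookkeeping.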
4042static void TestMappingMultithreaded()
4043{
4044 wprintf(L"Testing mapping multithreaded...\n");
4045
4046 static const uint32_t threadCount = 16;
4047 static const uint32_t bufferCount = 1024;
4048 static const uint32_t threadBufferCount = bufferCount / threadCount;
4049
4050 VkResult res;
4051 volatile uint32_t memTypeIndex = UINT32_MAX;
4052
4053 enum TEST
4054 {
4055 TEST_NORMAL,
4056 TEST_POOL,
4057 TEST_DEDICATED,
4058 TEST_COUNT
4059 };
4060 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4061 {
4062 VmaPool pool = nullptr;
4063 if(testIndex == TEST_POOL)
4064 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004065 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004066 VmaPoolCreateInfo poolInfo = {};
4067 poolInfo.memoryTypeIndex = memTypeIndex;
4068 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004069 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004070 }
4071
4072 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4073 bufCreateInfo.size = 0x10000;
4074 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4075
4076 VmaAllocationCreateInfo allocCreateInfo = {};
4077 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4078 allocCreateInfo.pool = pool;
4079 if(testIndex == TEST_DEDICATED)
4080 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4081
4082 std::thread threads[threadCount];
4083 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4084 {
4085 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4086 // ======== THREAD FUNCTION ========
4087
4088 RandomNumberGenerator rand{threadIndex};
4089
4090 enum class MODE
4091 {
4092 // Don't map this buffer at all.
4093 DONT_MAP,
4094 // Map and quickly unmap.
4095 MAP_FOR_MOMENT,
4096 // Map and unmap before destruction.
4097 MAP_FOR_LONGER,
4098 // Map two times: unmap once right away, the second time just before destruction.
4099 MAP_TWO_TIMES,
4100 // Create this buffer as persistently mapped.
4101 PERSISTENTLY_MAPPED,
4102 COUNT
4103 };
4104 std::vector<BufferInfo> bufInfos{threadBufferCount};
4105 std::vector<MODE> bufModes{threadBufferCount};
4106
4107 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4108 {
4109 BufferInfo& bufInfo = bufInfos[bufferIndex];
4110 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4111 bufModes[bufferIndex] = mode;
4112
4113 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4114 if(mode == MODE::PERSISTENTLY_MAPPED)
4115 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4116
4117 VmaAllocationInfo allocInfo;
4118 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4119 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004120 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004121
4122 if(memTypeIndex == UINT32_MAX)
4123 memTypeIndex = allocInfo.memoryType;
4124
4125 char* data = nullptr;
4126
4127 if(mode == MODE::PERSISTENTLY_MAPPED)
4128 {
4129 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004130 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004131 }
4132 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4133 mode == MODE::MAP_TWO_TIMES)
4134 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004135 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004136 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004137 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004138
4139 if(mode == MODE::MAP_TWO_TIMES)
4140 {
4141 char* data2 = nullptr;
4142 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004143 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004144 }
4145 }
4146 else if(mode == MODE::DONT_MAP)
4147 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004148 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004149 }
4150 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004151 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004152
4153 // Test that reading the first byte and writing the last byte of mapped memory doesn't crash.
4154 if(data)
4155 data[0xFFFF] = data[0];
4156
4157 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4158 {
4159 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4160
4161 VmaAllocationInfo allocInfo;
4162 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4163 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004164 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004165 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004166 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004167 }
4168
4169 switch(rand.Generate() % 3)
4170 {
4171 case 0: Sleep(0); break; // Yield.
4172 case 1: Sleep(10); break; // 10 ms
4173 // default: No sleep.
4174 }
4175
4176 // Test that reading the first byte and writing the last byte of mapped memory doesn't crash.
4177 if(data)
4178 data[0xFFFF] = data[0];
4179 }
4180
4181 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4182 {
4183 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4184 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4185 {
4186 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4187
4188 VmaAllocationInfo allocInfo;
4189 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004190 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004191 }
4192
4193 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4194 }
4195 });
4196 }
4197
4198 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4199 threads[threadIndex].join();
4200
4201 vmaDestroyPool(g_hAllocator, pool);
4202 }
4203}
4204
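// CSV output helpers. The header rows below must stay in sync with the fprintf format strings in
// WriteMainTestResult and WritePoolTestResult; all times are written in microseconds.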
4205static void WriteMainTestResultHeader(FILE* file)
4206{
4207 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004208 "Code,Time,"
4209 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004210 "Total Time (us),"
4211 "Allocation Time Min (us),"
4212 "Allocation Time Avg (us),"
4213 "Allocation Time Max (us),"
4214 "Deallocation Time Min (us),"
4215 "Deallocation Time Avg (us),"
4216 "Deallocation Time Max (us),"
4217 "Total Memory Allocated (B),"
4218 "Free Range Size Avg (B),"
4219 "Free Range Size Max (B)\n");
4220}
4221
4222static void WriteMainTestResult(
4223 FILE* file,
4224 const char* codeDescription,
4225 const char* testDescription,
4226 const Config& config, const Result& result)
4227{
4228 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4229 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4230 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4231 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4232 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4233 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4234 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4235
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004236 std::string currTime;
4237 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004238
4239 fprintf(file,
4240 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004241 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4242 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004243 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004244 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004245 totalTimeSeconds * 1e6f,
4246 allocationTimeMinSeconds * 1e6f,
4247 allocationTimeAvgSeconds * 1e6f,
4248 allocationTimeMaxSeconds * 1e6f,
4249 deallocationTimeMinSeconds * 1e6f,
4250 deallocationTimeAvgSeconds * 1e6f,
4251 deallocationTimeMaxSeconds * 1e6f,
4252 result.TotalMemoryAllocated,
4253 result.FreeRangeSizeAvg,
4254 result.FreeRangeSizeMax);
4255}
4256
4257static void WritePoolTestResultHeader(FILE* file)
4258{
4259 fprintf(file,
4260 "Code,Test,Time,"
4261 "Config,"
4262 "Total Time (us),"
4263 "Allocation Time Min (us),"
4264 "Allocation Time Avg (us),"
4265 "Allocation Time Max (us),"
4266 "Deallocation Time Min (us),"
4267 "Deallocation Time Avg (us),"
4268 "Deallocation Time Max (us),"
4269 "Lost Allocation Count,"
4270 "Lost Allocation Total Size (B),"
4271 "Failed Allocation Count,"
4272 "Failed Allocation Total Size (B)\n");
4273}
4274
4275static void WritePoolTestResult(
4276 FILE* file,
4277 const char* codeDescription,
4278 const char* testDescription,
4279 const PoolTestConfig& config,
4280 const PoolTestResult& result)
4281{
4282 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4283 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4284 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4285 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4286 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4287 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4288 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4289
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004290 std::string currTime;
4291 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004292
4293 fprintf(file,
4294 "%s,%s,%s,"
4295 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4296 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4297 // General
4298 codeDescription,
4299 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004300 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004301 // Config
4302 config.ThreadCount,
4303 (unsigned long long)config.PoolSize,
4304 config.FrameCount,
4305 config.TotalItemCount,
4306 config.UsedItemCountMin,
4307 config.UsedItemCountMax,
4308 config.ItemsToMakeUnusedPercent,
4309 // Results
4310 totalTimeSeconds * 1e6f,
4311 allocationTimeMinSeconds * 1e6f,
4312 allocationTimeAvgSeconds * 1e6f,
4313 allocationTimeMaxSeconds * 1e6f,
4314 deallocationTimeMinSeconds * 1e6f,
4315 deallocationTimeAvgSeconds * 1e6f,
4316 deallocationTimeMaxSeconds * 1e6f,
4317 result.LostAllocationCount,
4318 result.LostAllocationTotalSize,
4319 result.FailedAllocationCount,
4320 result.FailedAllocationTotalSize);
4321}
4322
4323static void PerformCustomMainTest(FILE* file)
4324{
4325 Config config{};
4326 config.RandSeed = 65735476;
4327 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4328 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4329 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4330 config.FreeOrder = FREE_ORDER::FORWARD;
4331 config.ThreadCount = 16;
4332 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004333 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004334
4335 // Buffers
4336 //config.AllocationSizes.push_back({4, 16, 1024});
4337 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4338
4339 // Images
4340 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4341 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4342
4343 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4344 config.AdditionalOperationCount = 1024;
4345
4346 Result result{};
4347 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004348 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004349 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4350}
4351
4352static void PerformCustomPoolTest(FILE* file)
4353{
4354 PoolTestConfig config;
4355 config.PoolSize = 100 * 1024 * 1024;
4356 config.RandSeed = 2345764;
4357 config.ThreadCount = 1;
4358 config.FrameCount = 200;
4359 config.ItemsToMakeUnusedPercent = 2;
4360
4361 AllocationSize allocSize = {};
4362 allocSize.BufferSizeMin = 1024;
4363 allocSize.BufferSizeMax = 1024 * 1024;
4364 allocSize.Probability = 1;
4365 config.AllocationSizes.push_back(allocSize);
4366
4367 allocSize.BufferSizeMin = 0;
4368 allocSize.BufferSizeMax = 0;
4369 allocSize.ImageSizeMin = 128;
4370 allocSize.ImageSizeMax = 1024;
4371 allocSize.Probability = 1;
4372 config.AllocationSizes.push_back(allocSize);
4373
4374 config.PoolSize = config.CalcAvgResourceSize() * 200;
4375 config.UsedItemCountMax = 160;
4376 config.TotalItemCount = config.UsedItemCountMax * 10;
4377 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4378
4379 g_MemoryAliasingWarningEnabled = false;
4380 PoolTestResult result = {};
4381 TestPool_Benchmark(result, config);
4382 g_MemoryAliasingWarningEnabled = true;
4383
4384 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4385}
4386
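// Runs the main benchmark over a matrix of configurations: thread count and shared-allocation
// percentage, buffers vs. images, small vs. large sizes, and varying vs. constant sizes, with the
// matrix size controlled by ConfigType. Results are presumably appended to the given CSV file,
// as in PerformCustomMainTest above.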
Adam Sawickib8333fb2018-03-13 16:15:53 +01004387static void PerformMainTests(FILE* file)
4388{
4389 uint32_t repeatCount = 1;
4390 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4391
4392 Config config{};
4393 config.RandSeed = 65735476;
4394 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4395 config.FreeOrder = FREE_ORDER::FORWARD;
4396
4397 size_t threadCountCount = 1;
4398 switch(ConfigType)
4399 {
4400 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4401 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4402 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4403 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4404 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4405 default: assert(0);
4406 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004407
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004408 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004409
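    // The nested loops below sweep a full test matrix: thread configuration x buffers/images
    // x small/large sizes x varying/constant sizes x begin-bytes-to-allocate mode x
    // allocation strategy, each combination repeated repeatCount times.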
Adam Sawickib8333fb2018-03-13 16:15:53 +01004410 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4411 {
4412 std::string desc1;
4413
4414 switch(threadCountIndex)
4415 {
4416 case 0:
4417 desc1 += "1_thread";
4418 config.ThreadCount = 1;
4419 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4420 break;
4421 case 1:
4422 desc1 += "16_threads+0%_common";
4423 config.ThreadCount = 16;
4424 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4425 break;
4426 case 2:
4427 desc1 += "16_threads+50%_common";
4428 config.ThreadCount = 16;
4429 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4430 break;
4431 case 3:
4432 desc1 += "16_threads+100%_common";
4433 config.ThreadCount = 16;
4434 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4435 break;
4436 case 4:
4437 desc1 += "2_threads+0%_common";
4438 config.ThreadCount = 2;
4439 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4440 break;
4441 case 5:
4442 desc1 += "2_threads+50%_common";
4443 config.ThreadCount = 2;
4444 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4445 break;
4446 case 6:
4447 desc1 += "2_threads+100%_common";
4448 config.ThreadCount = 2;
4449 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4450 break;
4451 default:
4452 assert(0);
4453 }
4454
4455 // 0 = buffers, 1 = images, 2 = buffers and images
4456 size_t buffersVsImagesCount = 2;
4457 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4458 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4459 {
4460 std::string desc2 = desc1;
4461 switch(buffersVsImagesIndex)
4462 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004463 case 0: desc2 += ",Buffers"; break;
4464 case 1: desc2 += ",Images"; break;
4465 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004466 default: assert(0);
4467 }
4468
4469 // 0 = small, 1 = large, 2 = small and large
4470 size_t smallVsLargeCount = 2;
4471 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4472 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4473 {
4474 std::string desc3 = desc2;
4475 switch(smallVsLargeIndex)
4476 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004477 case 0: desc3 += ",Small"; break;
4478 case 1: desc3 += ",Large"; break;
4479 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004480 default: assert(0);
4481 }
4482
4483 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4484 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4485 else
4486                     config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4487
4488 // 0 = varying sizes min...max, 1 = set of constant sizes
4489 size_t constantSizesCount = 1;
4490 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4491 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4492 {
4493 std::string desc4 = desc3;
4494 switch(constantSizesIndex)
4495 {
4496                 case 0: desc4 += ",Varying_sizes"; break;
4497                 case 1: desc4 += ",Constant_sizes"; break;
4498 default: assert(0);
4499 }
4500
4501 config.AllocationSizes.clear();
4502 // Buffers present
4503 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4504 {
4505 // Small
4506 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4507 {
4508 // Varying size
4509 if(constantSizesIndex == 0)
4510 config.AllocationSizes.push_back({4, 16, 1024});
4511 // Constant sizes
4512 else
4513 {
4514 config.AllocationSizes.push_back({1, 16, 16});
4515 config.AllocationSizes.push_back({1, 64, 64});
4516 config.AllocationSizes.push_back({1, 256, 256});
4517 config.AllocationSizes.push_back({1, 1024, 1024});
4518 }
4519 }
4520 // Large
4521 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4522 {
4523 // Varying size
4524 if(constantSizesIndex == 0)
4525 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4526 // Constant sizes
4527 else
4528 {
4529 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4530 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4531 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4532 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4533 }
4534 }
4535 }
4536 // Images present
4537 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4538 {
4539 // Small
4540 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4541 {
4542 // Varying size
4543 if(constantSizesIndex == 0)
4544 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4545 // Constant sizes
4546 else
4547 {
4548 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4549 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4550 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4551 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4552 }
4553 }
4554 // Large
4555 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4556 {
4557 // Varying size
4558 if(constantSizesIndex == 0)
4559 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4560 // Constant sizes
4561 else
4562 {
4563 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4564 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4565 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4566 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4567 }
4568 }
4569 }
4570
4571                 // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations
4572 size_t beginBytesToAllocateCount = 1;
4573 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4574 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4575 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4576 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4577 {
4578 std::string desc5 = desc4;
4579
4580 switch(beginBytesToAllocateIndex)
4581 {
4582 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004583 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004584 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4585 config.AdditionalOperationCount = 0;
4586 break;
4587 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004588 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004589 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4590 config.AdditionalOperationCount = 1024;
4591 break;
4592 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004593 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004594 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4595 config.AdditionalOperationCount = 1024;
4596 break;
4597 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004598 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004599 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4600 config.AdditionalOperationCount = 1024;
4601 break;
4602 default:
4603 assert(0);
4604 }
4605
Adam Sawicki0667e332018-08-24 17:26:44 +02004606 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004607 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004608 std::string desc6 = desc5;
4609 switch(strategyIndex)
4610 {
4611 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004612 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004613 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4614 break;
4615 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004616 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004617 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4618 break;
4619 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004620 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004621 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4622 break;
4623 default:
4624 assert(0);
4625 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004626
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004627 desc6 += ',';
4628 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004629
4630 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004631
4632 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4633 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004634 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004635
4636 Result result{};
4637 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004638 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004639 if(file)
4640 {
4641 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4642 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004643 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004644 }
4645 }
4646 }
4647 }
4648 }
4649 }
4650}
4651
4652static void PerformPoolTests(FILE* file)
4653{
4654 const size_t AVG_RESOURCES_PER_POOL = 300;
4655
4656 uint32_t repeatCount = 1;
4657 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4658
4659 PoolTestConfig config{};
4660 config.RandSeed = 2346343;
4661 config.FrameCount = 200;
4662 config.ItemsToMakeUnusedPercent = 2;
4663
4664 size_t threadCountCount = 1;
4665 switch(ConfigType)
4666 {
4667 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4668 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4669 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4670 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4671 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4672 default: assert(0);
4673 }
4674 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4675 {
4676 std::string desc1;
4677
4678 switch(threadCountIndex)
4679 {
4680 case 0:
4681 desc1 += "1_thread";
4682 config.ThreadCount = 1;
4683 break;
4684 case 1:
4685 desc1 += "16_threads";
4686 config.ThreadCount = 16;
4687 break;
4688 case 2:
4689 desc1 += "2_threads";
4690 config.ThreadCount = 2;
4691 break;
4692 default:
4693 assert(0);
4694 }
4695
4696 // 0 = buffers, 1 = images, 2 = buffers and images
4697 size_t buffersVsImagesCount = 2;
4698 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4699 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4700 {
4701 std::string desc2 = desc1;
4702 switch(buffersVsImagesIndex)
4703 {
4704 case 0: desc2 += " Buffers"; break;
4705 case 1: desc2 += " Images"; break;
4706 case 2: desc2 += " Buffers+Images"; break;
4707 default: assert(0);
4708 }
4709
4710 // 0 = small, 1 = large, 2 = small and large
4711 size_t smallVsLargeCount = 2;
4712 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4713 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4714 {
4715 std::string desc3 = desc2;
4716 switch(smallVsLargeIndex)
4717 {
4718 case 0: desc3 += " Small"; break;
4719 case 1: desc3 += " Large"; break;
4720 case 2: desc3 += " Small+Large"; break;
4721 default: assert(0);
4722 }
4723
4724 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4725 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4726 else
4727                 config.PoolSize = 4ull * 1024 * 1024; // 4 MB
4728
4729 // 0 = varying sizes min...max, 1 = set of constant sizes
4730 size_t constantSizesCount = 1;
4731 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4732 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4733 {
4734 std::string desc4 = desc3;
4735 switch(constantSizesIndex)
4736 {
4737 case 0: desc4 += " Varying_sizes"; break;
4738 case 1: desc4 += " Constant_sizes"; break;
4739 default: assert(0);
4740 }
4741
4742 config.AllocationSizes.clear();
4743 // Buffers present
4744 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4745 {
4746 // Small
4747 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4748 {
4749 // Varying size
4750 if(constantSizesIndex == 0)
4751 config.AllocationSizes.push_back({4, 16, 1024});
4752 // Constant sizes
4753 else
4754 {
4755 config.AllocationSizes.push_back({1, 16, 16});
4756 config.AllocationSizes.push_back({1, 64, 64});
4757 config.AllocationSizes.push_back({1, 256, 256});
4758 config.AllocationSizes.push_back({1, 1024, 1024});
4759 }
4760 }
4761 // Large
4762 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4763 {
4764 // Varying size
4765 if(constantSizesIndex == 0)
4766 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4767 // Constant sizes
4768 else
4769 {
4770 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4771 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4772 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4773 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4774 }
4775 }
4776 }
4777 // Images present
4778 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4779 {
4780 // Small
4781 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4782 {
4783 // Varying size
4784 if(constantSizesIndex == 0)
4785 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4786 // Constant sizes
4787 else
4788 {
4789 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4790 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4791 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4792 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4793 }
4794 }
4795 // Large
4796 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4797 {
4798 // Varying size
4799 if(constantSizesIndex == 0)
4800 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4801 // Constant sizes
4802 else
4803 {
4804 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4805 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4806 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4807 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4808 }
4809 }
4810 }
4811
4812 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4813 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4814
4815                 // Subscription modes (used items as a percentage of the pool's average capacity): 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4816 size_t subscriptionModeCount;
4817 switch(ConfigType)
4818 {
4819 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4820 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4821 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4822 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4823 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4824 default: assert(0);
4825 }
4826 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4827 {
4828 std::string desc5 = desc4;
4829
4830 switch(subscriptionModeIndex)
4831 {
4832 case 0:
4833 desc5 += " Subscription_66%";
4834 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4835 break;
4836 case 1:
4837 desc5 += " Subscription_133%";
4838 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4839 break;
4840 case 2:
4841 desc5 += " Subscription_100%";
4842 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4843 break;
4844 case 3:
4845 desc5 += " Subscription_33%";
4846 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4847 break;
4848 case 4:
4849 desc5 += " Subscription_166%";
4850 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4851 break;
4852 default:
4853 assert(0);
4854 }
4855
4856 config.TotalItemCount = config.UsedItemCountMax * 5;
4857 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
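                        // In the 133% and 166% modes more items can be in use per frame than the
                        // pool was sized for, so those runs benchmark over-subscription.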
4858
4859 const char* testDescription = desc5.c_str();
4860
4861 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4862 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004863 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004864
4865 PoolTestResult result{};
4866 g_MemoryAliasingWarningEnabled = false;
4867 TestPool_Benchmark(result, config);
4868 g_MemoryAliasingWarningEnabled = true;
4869 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4870 }
4871 }
4872 }
4873 }
4874 }
4875 }
4876}
4877
Adam Sawickia83793a2018-09-03 13:40:42 +02004878static void BasicTestBuddyAllocator()
4879{
4880 wprintf(L"Basic test buddy allocator\n");
4881
4882 RandomNumberGenerator rand{76543};
4883
4884 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4885     sampleBufCreateInfo.size = 1024; // Size is arbitrary here; this buffer info is only used to query a compatible memory type index.
4886 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4887
4888 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4889 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4890
4891 VmaPoolCreateInfo poolCreateInfo = {};
4892 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004893 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004894
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004895     // Deliberately add 1023 to test the case where the usable size is smaller than the memory block size.
4896 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
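    // (The buddy algorithm is expected to round the usable block size down to a power of
    // two, i.e. 1 MB here, leaving the extra 1023 bytes unused.)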
Adam Sawickia83793a2018-09-03 13:40:42 +02004897 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004898 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004899
4900 VmaPool pool = nullptr;
4901 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004902 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004903
4904 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4905
4906 VmaAllocationCreateInfo allocCreateInfo = {};
4907 allocCreateInfo.pool = pool;
4908
4909 std::vector<BufferInfo> bufInfo;
4910 BufferInfo newBufInfo;
4911 VmaAllocationInfo allocInfo;
4912
4913 bufCreateInfo.size = 1024 * 256;
4914 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4915 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004916 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004917 bufInfo.push_back(newBufInfo);
4918
4919 bufCreateInfo.size = 1024 * 512;
4920 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4921 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004922 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004923 bufInfo.push_back(newBufInfo);
4924
4925 bufCreateInfo.size = 1024 * 128;
4926 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4927 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004928 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004929 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004930
4931 // Test very small allocation, smaller than minimum node size.
4932 bufCreateInfo.size = 1;
4933 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4934 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004935 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004936 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004937
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004938     // Test a small allocation with an explicit alignment requirement.
4939 {
4940 VkMemoryRequirements memReq;
4941 memReq.alignment = 256;
4942 memReq.memoryTypeBits = UINT32_MAX;
4943 memReq.size = 32;
4944
4945 newBufInfo.Buffer = VK_NULL_HANDLE;
4946 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4947 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004948 TEST(res == VK_SUCCESS);
4949 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004950 bufInfo.push_back(newBufInfo);
4951 }
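    // The alignment presumably holds because buddy nodes are power-of-two sized and start
    // at offsets that are multiples of their size, so any node of at least 256 bytes
    // begins at a 256-byte-aligned offset.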
4952
4953 //SaveAllocatorStatsToFile(L"TEST.json");
4954
Adam Sawicki21017c62018-09-07 15:26:59 +02004955 VmaPoolStats stats = {};
4956 vmaGetPoolStats(g_hAllocator, pool, &stats);
4957 int DBG = 0; // Set breakpoint here to inspect `stats`.
4958
Adam Sawicki80927152018-09-07 17:27:23 +02004959     // Allocate enough new buffers to be sure that some of them spill over into a second memory block.
4960 for(uint32_t i = 0; i < 32; ++i)
4961 {
4962 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4963 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4964 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004965 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004966 bufInfo.push_back(newBufInfo);
4967 }
4968
4969 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4970
Adam Sawickia83793a2018-09-03 13:40:42 +02004971 // Destroy the buffers in random order.
4972 while(!bufInfo.empty())
4973 {
4974 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4975 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4976 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4977 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4978 }
4979
4980 vmaDestroyPool(g_hAllocator, pool);
4981}
4982
Adam Sawickif2975342018-10-16 13:49:02 +02004983// Test the testing environment.
4984static void TestGpuData()
4985{
4986 RandomNumberGenerator rand = { 53434 };
4987
4988 std::vector<AllocInfo> allocInfo;
4989
4990 for(size_t i = 0; i < 100; ++i)
4991 {
4992 AllocInfo info = {};
4993
4994 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4995 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4996 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4997 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4998 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
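        // rand.Generate() % 9 + 1 yields 1..9, so buffer sizes range from 1 MB to 9 MB.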
4999
5000 VmaAllocationCreateInfo allocCreateInfo = {};
5001 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5002
5003 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5004 TEST(res == VK_SUCCESS);
5005
5006 info.m_StartValue = rand.Generate();
5007
5008 allocInfo.push_back(std::move(info));
5009 }
5010
5011 UploadGpuData(allocInfo.data(), allocInfo.size());
5012
5013 ValidateGpuData(allocInfo.data(), allocInfo.size());
5014
5015 DestroyAllAllocations(allocInfo);
5016}
5017
Adam Sawickib8333fb2018-03-13 16:15:53 +01005018void Test()
5019{
5020 wprintf(L"TESTING:\n");
5021
Adam Sawicki52076eb2018-11-22 16:14:50 +01005022 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005023 {
5024 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005025 // ########################################
5026 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02005027
Adam Sawicki52076eb2018-11-22 16:14:50 +01005028 TestDefragmentationWholePool();
5029 //TestDefragmentationSimple();
5030 //TestDefragmentationFull();
5031 //TestDefragmentationGpu(0);
5032 //TestDefragmentationGpu(VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT);
5033 //TestDefragmentationGpu(VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT);
Adam Sawicki70a683e2018-08-24 15:36:32 +02005034 return;
5035 }
5036
Adam Sawickib8333fb2018-03-13 16:15:53 +01005037 // # Simple tests
5038
5039 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005040 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005041#if VMA_DEBUG_MARGIN
5042 TestDebugMargin();
5043#else
5044 TestPool_SameSize();
5045 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005046 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005047#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005048#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5049 TestAllocationsInitialization();
5050#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005051 TestMapping();
5052 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005053 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005054 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005055 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005056
Adam Sawicki4338f662018-09-07 14:12:37 +02005057 BasicTestBuddyAllocator();
5058
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005059 {
5060 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005061 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005062 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005063 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005064 fclose(file);
5065 }
5066
Adam Sawickib8333fb2018-03-13 16:15:53 +01005067 TestDefragmentationSimple();
5068 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005069 TestDefragmentationWholePool();
Adam Sawickic6ede152018-11-16 17:04:14 +01005070 TestDefragmentationGpu(0);
5071 TestDefragmentationGpu(VMA_DEFRAGMENTATION_FAST_ALGORITHM_BIT);
5072 TestDefragmentationGpu(VMA_DEFRAGMENTATION_OPTIMAL_ALGORITHM_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005073
5074 // # Detailed tests
5075 FILE* file;
5076 fopen_s(&file, "Results.csv", "w");
5077 assert(file != NULL);
5078
5079 WriteMainTestResultHeader(file);
5080 PerformMainTests(file);
5081 //PerformCustomMainTest(file);
5082
5083 WritePoolTestResultHeader(file);
5084 PerformPoolTests(file);
5085 //PerformCustomPoolTest(file);
5086
5087 fclose(file);
5088
5089 wprintf(L"Done.\n");
5090}
5091
Adam Sawickif1a793c2018-03-13 15:42:22 +01005092#endif // #ifdef _WIN32