Adam Sawickif1a793c2018-03-13 15:42:22 +01001#include "Tests.h"
2#include "VmaUsage.h"
3#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004#include <atomic>
5#include <thread>
6#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +01007
8#ifdef _WIN32
9
Adam Sawicki33d2ce72018-08-27 13:59:13 +020010static const char* CODE_DESCRIPTION = "Foo";
11
Adam Sawickif2975342018-10-16 13:49:02 +020012extern VkCommandBuffer g_hTemporaryCommandBuffer;
13void BeginSingleTimeCommands();
14void EndSingleTimeCommands();
15
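// Scales how exhaustive the tests are, from a minimal smoke run to the maximum stress configuration.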
Adam Sawicki0a607132018-08-24 11:18:41 +020016enum CONFIG_TYPE {
17 CONFIG_TYPE_MINIMUM,
18 CONFIG_TYPE_SMALL,
19 CONFIG_TYPE_AVERAGE,
20 CONFIG_TYPE_LARGE,
21 CONFIG_TYPE_MAXIMUM,
22 CONFIG_TYPE_COUNT
23};
24
Adam Sawickif2975342018-10-16 13:49:02 +020025static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
26//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020027
Adam Sawickib8333fb2018-03-13 16:15:53 +010028enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
29
Adam Sawicki0667e332018-08-24 17:26:44 +020030static const char* FREE_ORDER_NAMES[] = {
31 "FORWARD",
32 "BACKWARD",
33 "RANDOM",
Adam Sawicki0a607132018-08-24 11:18:41 +020034};
35
Adam Sawicki80927152018-09-07 17:27:23 +020036// Copy of internal VmaAlgorithmToStr.
37static const char* AlgorithmToStr(uint32_t algorithm)
38{
39 switch(algorithm)
40 {
41 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
42 return "Linear";
43 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
44 return "Buddy";
45 case 0:
46 return "Default";
47 default:
48 assert(0);
49 return "";
50 }
51}
52
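// One weighted size bucket used by the random tests: either a buffer size range (BufferSizeMin/Max > 0)
// or a square image dimension range (ImageSizeMin/Max > 0), chosen with the given relative Probability.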
Adam Sawickib8333fb2018-03-13 16:15:53 +010053struct AllocationSize
54{
55 uint32_t Probability;
56 VkDeviceSize BufferSizeMin, BufferSizeMax;
57 uint32_t ImageSizeMin, ImageSizeMax;
58};
59
60struct Config
61{
62 uint32_t RandSeed;
63 VkDeviceSize BeginBytesToAllocate;
64 uint32_t AdditionalOperationCount;
65 VkDeviceSize MaxBytesToAllocate;
66 uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
67 std::vector<AllocationSize> AllocationSizes;
68 uint32_t ThreadCount;
69 uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
70 FREE_ORDER FreeOrder;
Adam Sawicki0667e332018-08-24 17:26:44 +020071 VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
Adam Sawickib8333fb2018-03-13 16:15:53 +010072};
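// A minimal sketch of how a Config could be filled for a quick single-threaded run. The values below are
// illustrative assumptions, not taken from this file:
//
//   Config config{};
//   config.RandSeed = 123;
//   config.BeginBytesToAllocate = 64ull * 1024 * 1024;
//   config.MaxBytesToAllocate = 256ull * 1024 * 1024;
//   config.AdditionalOperationCount = 1000;
//   config.MemUsageProbability[0] = 1; // Always VMA_MEMORY_USAGE_GPU_ONLY.
//   config.AllocationSizes.push_back({1, 16, 1024, 0, 0}); // Small buffers only.
//   config.ThreadCount = 1;
//   config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
//   config.FreeOrder = FREE_ORDER::FORWARD;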
73
74struct Result
75{
76 duration TotalTime;
77 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
78 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
79 VkDeviceSize TotalMemoryAllocated;
80 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
81};
82
83void TestDefragmentationSimple();
84void TestDefragmentationFull();
85
86struct PoolTestConfig
87{
88 uint32_t RandSeed;
89 uint32_t ThreadCount;
90 VkDeviceSize PoolSize;
91 uint32_t FrameCount;
92 uint32_t TotalItemCount;
93 // Range for number of items used in each frame.
94 uint32_t UsedItemCountMin, UsedItemCountMax;
95 // Percent of items to make unused, and possibly make some others used in each frame.
96 uint32_t ItemsToMakeUnusedPercent;
97 std::vector<AllocationSize> AllocationSizes;
98
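 // Probability-weighted average size of a single resource: buffers use the mean of [BufferSizeMin, BufferSizeMax],
 // images use (average dimension)^2 * 4 bytes. For example, a single bucket with BufferSizeMin = 64 KB and
 // BufferSizeMax = 192 KB gives an average of 128 KB.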
99 VkDeviceSize CalcAvgResourceSize() const
100 {
101 uint32_t probabilitySum = 0;
102 VkDeviceSize sizeSum = 0;
103 for(size_t i = 0; i < AllocationSizes.size(); ++i)
104 {
105 const AllocationSize& allocSize = AllocationSizes[i];
106 if(allocSize.BufferSizeMax > 0)
107 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
108 else
109 {
110 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
111 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
112 }
113 probabilitySum += allocSize.Probability;
114 }
115 return sizeSum / probabilitySum;
116 }
117
118 bool UsesBuffers() const
119 {
120 for(size_t i = 0; i < AllocationSizes.size(); ++i)
121 if(AllocationSizes[i].BufferSizeMax > 0)
122 return true;
123 return false;
124 }
125
126 bool UsesImages() const
127 {
128 for(size_t i = 0; i < AllocationSizes.size(); ++i)
129 if(AllocationSizes[i].ImageSizeMax > 0)
130 return true;
131 return false;
132 }
133};
134
135struct PoolTestResult
136{
137 duration TotalTime;
138 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
139 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
140 size_t LostAllocationCount, LostAllocationTotalSize;
141 size_t FailedAllocationCount, FailedAllocationTotalSize;
142};
143
144static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
145
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200146static uint32_t g_FrameIndex = 0;
147
Adam Sawickib8333fb2018-03-13 16:15:53 +0100148struct BufferInfo
149{
150 VkBuffer Buffer = VK_NULL_HANDLE;
151 VmaAllocation Allocation = VK_NULL_HANDLE;
152};
153
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200154static uint32_t GetAllocationStrategyCount()
155{
156 uint32_t strategyCount = 0;
157 switch(ConfigType)
158 {
159 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
160 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
161 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
162 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
163 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
164 default: assert(0);
165 }
166 return strategyCount;
167}
168
169static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
170{
171 switch(allocStrategy)
172 {
173 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
174 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
175 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
176 case 0: return "Default"; break;
177 default: assert(0); return "";
178 }
179}
180
Adam Sawickib8333fb2018-03-13 16:15:53 +0100181static void InitResult(Result& outResult)
182{
183 outResult.TotalTime = duration::zero();
184 outResult.AllocationTimeMin = duration::max();
185 outResult.AllocationTimeAvg = duration::zero();
186 outResult.AllocationTimeMax = duration::min();
187 outResult.DeallocationTimeMin = duration::max();
188 outResult.DeallocationTimeAvg = duration::zero();
189 outResult.DeallocationTimeMax = duration::min();
190 outResult.TotalMemoryAllocated = 0;
191 outResult.FreeRangeSizeAvg = 0;
192 outResult.FreeRangeSizeMax = 0;
193}
194
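// RAII timer: measures the interval from construction to destruction and folds it into the referenced
// min / sum / max durations.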
195class TimeRegisterObj
196{
197public:
198 TimeRegisterObj(duration& min, duration& sum, duration& max) :
199 m_Min(min),
200 m_Sum(sum),
201 m_Max(max),
202 m_TimeBeg(std::chrono::high_resolution_clock::now())
203 {
204 }
205
206 ~TimeRegisterObj()
207 {
208 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
209 m_Sum += d;
210 if(d < m_Min) m_Min = d;
211 if(d > m_Max) m_Max = d;
212 }
213
214private:
215 duration& m_Min;
216 duration& m_Sum;
217 duration& m_Max;
218 time_point m_TimeBeg;
219};
220
221struct PoolTestThreadResult
222{
223 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
224 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
225 size_t AllocationCount, DeallocationCount;
226 size_t LostAllocationCount, LostAllocationTotalSize;
227 size_t FailedAllocationCount, FailedAllocationTotalSize;
228};
229
230class AllocationTimeRegisterObj : public TimeRegisterObj
231{
232public:
233 AllocationTimeRegisterObj(Result& result) :
234 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
235 {
236 }
237};
238
239class DeallocationTimeRegisterObj : public TimeRegisterObj
240{
241public:
242 DeallocationTimeRegisterObj(Result& result) :
243 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
244 {
245 }
246};
247
248class PoolAllocationTimeRegisterObj : public TimeRegisterObj
249{
250public:
251 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
252 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
253 {
254 }
255};
256
257class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
258{
259public:
260 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
261 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
262 {
263 }
264};
265
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200266static void CurrentTimeToStr(std::string& out)
267{
268 time_t rawTime; time(&rawTime);
269 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
270 char timeStr[128];
271 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
272 out = timeStr;
273}
274
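// Core stress test: spawns config.ThreadCount threads that randomly create and destroy buffers and images
// (some shared between threads), waits for all of them to reach their allocation budget, captures allocator
// statistics, then frees everything in the configured order while timing every operation into outResult.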
Adam Sawickib8333fb2018-03-13 16:15:53 +0100275VkResult MainTest(Result& outResult, const Config& config)
276{
277 assert(config.ThreadCount > 0);
278
279 InitResult(outResult);
280
281 RandomNumberGenerator mainRand{config.RandSeed};
282
283 time_point timeBeg = std::chrono::high_resolution_clock::now();
284
285 std::atomic<size_t> allocationCount = 0;
286 VkResult res = VK_SUCCESS;
287
288 uint32_t memUsageProbabilitySum =
289 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
290 config.MemUsageProbability[2] + config.MemUsageProbability[3];
291 assert(memUsageProbabilitySum > 0);
292
293 uint32_t allocationSizeProbabilitySum = std::accumulate(
294 config.AllocationSizes.begin(),
295 config.AllocationSizes.end(),
296 0u,
297 [](uint32_t sum, const AllocationSize& allocSize) {
298 return sum + allocSize.Probability;
299 });
300
301 struct Allocation
302 {
303 VkBuffer Buffer;
304 VkImage Image;
305 VmaAllocation Alloc;
306 };
307
308 std::vector<Allocation> commonAllocations;
309 std::mutex commonAllocationsMutex;
310
311 auto Allocate = [&](
312 VkDeviceSize bufferSize,
313 const VkExtent2D imageExtent,
314 RandomNumberGenerator& localRand,
315 VkDeviceSize& totalAllocatedBytes,
316 std::vector<Allocation>& allocations) -> VkResult
317 {
318 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
319
320 uint32_t memUsageIndex = 0;
321 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
322 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
323 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
324
325 VmaAllocationCreateInfo memReq = {};
326 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200327 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100328
329 Allocation allocation = {};
330 VmaAllocationInfo allocationInfo;
331
332 // Buffer
333 if(bufferSize > 0)
334 {
335 assert(imageExtent.width == 0);
336 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
337 bufferInfo.size = bufferSize;
338 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
339
340 {
341 AllocationTimeRegisterObj timeRegisterObj{outResult};
342 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
343 }
344 }
345 // Image
346 else
347 {
348 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
349 imageInfo.imageType = VK_IMAGE_TYPE_2D;
350 imageInfo.extent.width = imageExtent.width;
351 imageInfo.extent.height = imageExtent.height;
352 imageInfo.extent.depth = 1;
353 imageInfo.mipLevels = 1;
354 imageInfo.arrayLayers = 1;
355 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
356 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
357 VK_IMAGE_TILING_OPTIMAL :
358 VK_IMAGE_TILING_LINEAR;
359 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
360 switch(memReq.usage)
361 {
362 case VMA_MEMORY_USAGE_GPU_ONLY:
363 switch(localRand.Generate() % 3)
364 {
365 case 0:
366 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
367 break;
368 case 1:
369 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
370 break;
371 case 2:
372 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
373 break;
374 }
375 break;
376 case VMA_MEMORY_USAGE_CPU_ONLY:
377 case VMA_MEMORY_USAGE_CPU_TO_GPU:
378 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
379 break;
380 case VMA_MEMORY_USAGE_GPU_TO_CPU:
381 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
382 break;
383 }
384 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
385 imageInfo.flags = 0;
386
387 {
388 AllocationTimeRegisterObj timeRegisterObj{outResult};
389 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
390 }
391 }
392
393 if(res == VK_SUCCESS)
394 {
395 ++allocationCount;
396 totalAllocatedBytes += allocationInfo.size;
397 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
398 if(useCommonAllocations)
399 {
400 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
401 commonAllocations.push_back(allocation);
402 }
403 else
404 allocations.push_back(allocation);
405 }
406 else
407 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200408 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100409 }
410 return res;
411 };
412
413 auto GetNextAllocationSize = [&](
414 VkDeviceSize& outBufSize,
415 VkExtent2D& outImageSize,
416 RandomNumberGenerator& localRand)
417 {
418 outBufSize = 0;
419 outImageSize = {0, 0};
420
421 uint32_t allocSizeIndex = 0;
422 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
423 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
424 r -= config.AllocationSizes[allocSizeIndex++].Probability;
425
426 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
427 if(allocSize.BufferSizeMax > 0)
428 {
429 assert(allocSize.ImageSizeMax == 0);
430 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
431 outBufSize = allocSize.BufferSizeMin;
432 else
433 {
434 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
435 outBufSize = outBufSize / 16 * 16;
436 }
437 }
438 else
439 {
440 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
441 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
442 else
443 {
444 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
445 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
446 }
447 }
448 };
449
450 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
451 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
452
453 auto ThreadProc = [&](uint32_t randSeed) -> void
454 {
455 RandomNumberGenerator threadRand(randSeed);
456 VkDeviceSize threadTotalAllocatedBytes = 0;
457 std::vector<Allocation> threadAllocations;
458 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
459 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
460 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
461
462 // BEGIN ALLOCATIONS
463 for(;;)
464 {
465 VkDeviceSize bufferSize = 0;
466 VkExtent2D imageExtent = {};
467 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
468 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
469 threadBeginBytesToAllocate)
470 {
471 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
472 break;
473 }
474 else
475 break;
476 }
477
478 // ADDITIONAL ALLOCATIONS AND FREES
479 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
480 {
481 VkDeviceSize bufferSize = 0;
482 VkExtent2D imageExtent = {};
483 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
484
485 // true = allocate, false = free
486 bool allocate = threadRand.Generate() % 2 != 0;
487
488 if(allocate)
489 {
490 if(threadTotalAllocatedBytes +
491 bufferSize +
492 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
493 threadMaxBytesToAllocate)
494 {
495 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
496 break;
497 }
498 }
499 else
500 {
501 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
502 if(useCommonAllocations)
503 {
504 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
505 if(!commonAllocations.empty())
506 {
507 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
508 VmaAllocationInfo allocationInfo;
509 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
510 if(threadTotalAllocatedBytes >= allocationInfo.size)
511 {
512 DeallocationTimeRegisterObj timeRegisterObj{outResult};
513 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
514 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
515 else
516 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
517 threadTotalAllocatedBytes -= allocationInfo.size;
518 commonAllocations.erase(commonAllocations.begin() + indexToFree);
519 }
520 }
521 }
522 else
523 {
524 if(!threadAllocations.empty())
525 {
526 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
527 VmaAllocationInfo allocationInfo;
528 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
529 if(threadTotalAllocatedBytes >= allocationInfo.size)
530 {
531 DeallocationTimeRegisterObj timeRegisterObj{outResult};
532 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
533 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
534 else
535 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
536 threadTotalAllocatedBytes -= allocationInfo.size;
537 threadAllocations.erase(threadAllocations.begin() + indexToFree);
538 }
539 }
540 }
541 }
542 }
543
544 ++numThreadsReachedMaxAllocations;
545
546 WaitForSingleObject(threadsFinishEvent, INFINITE);
547
548 // DEALLOCATION
549 while(!threadAllocations.empty())
550 {
551 size_t indexToFree = 0;
552 switch(config.FreeOrder)
553 {
554 case FREE_ORDER::FORWARD:
555 indexToFree = 0;
556 break;
557 case FREE_ORDER::BACKWARD:
558 indexToFree = threadAllocations.size() - 1;
559 break;
560 case FREE_ORDER::RANDOM:
561 indexToFree = mainRand.Generate() % threadAllocations.size();
562 break;
563 }
564
565 {
566 DeallocationTimeRegisterObj timeRegisterObj{outResult};
567 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
568 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
569 else
570 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
571 }
572 threadAllocations.erase(threadAllocations.begin() + indexToFree);
573 }
574 };
575
576 uint32_t threadRandSeed = mainRand.Generate();
577 std::vector<std::thread> bkgThreads;
578 for(size_t i = 0; i < config.ThreadCount; ++i)
579 {
580 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
581 }
582
 583 // Wait until all threads have reached their maximum number of allocations
584 while(numThreadsReachedMaxAllocations < config.ThreadCount)
585 Sleep(0);
586
587 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
588 VmaStats vmaStats = {};
589 vmaCalculateStats(g_hAllocator, &vmaStats);
590 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
591 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
592 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
593
594 // Signal threads to deallocate
595 SetEvent(threadsFinishEvent);
596
 597 // Wait for all threads to finish
598 for(size_t i = 0; i < bkgThreads.size(); ++i)
599 bkgThreads[i].join();
600 bkgThreads.clear();
601
602 CloseHandle(threadsFinishEvent);
603
604 // Deallocate remaining common resources
605 while(!commonAllocations.empty())
606 {
607 size_t indexToFree = 0;
608 switch(config.FreeOrder)
609 {
610 case FREE_ORDER::FORWARD:
611 indexToFree = 0;
612 break;
613 case FREE_ORDER::BACKWARD:
614 indexToFree = commonAllocations.size() - 1;
615 break;
616 case FREE_ORDER::RANDOM:
617 indexToFree = mainRand.Generate() % commonAllocations.size();
618 break;
619 }
620
621 {
622 DeallocationTimeRegisterObj timeRegisterObj{outResult};
623 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
624 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
625 else
626 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
627 }
628 commonAllocations.erase(commonAllocations.begin() + indexToFree);
629 }
630
631 if(allocationCount)
632 {
633 outResult.AllocationTimeAvg /= allocationCount;
634 outResult.DeallocationTimeAvg /= allocationCount;
635 }
636
637 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
638
639 return res;
640}
641
Adam Sawickie44c6262018-06-15 14:30:39 +0200642static void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100643{
644 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200645 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100646 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200647 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100648}
649
650struct AllocInfo
651{
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200652 VmaAllocation m_Allocation = VK_NULL_HANDLE;
653 VkBuffer m_Buffer = VK_NULL_HANDLE;
654 VkImage m_Image = VK_NULL_HANDLE;
655 uint32_t m_StartValue = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100656 union
657 {
658 VkBufferCreateInfo m_BufferInfo;
659 VkImageCreateInfo m_ImageInfo;
660 };
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200661
662 void CreateBuffer(
663 const VkBufferCreateInfo& bufCreateInfo,
664 const VmaAllocationCreateInfo& allocCreateInfo);
665 void Destroy();
Adam Sawickib8333fb2018-03-13 16:15:53 +0100666};
667
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200668void AllocInfo::CreateBuffer(
669 const VkBufferCreateInfo& bufCreateInfo,
670 const VmaAllocationCreateInfo& allocCreateInfo)
671{
672 m_BufferInfo = bufCreateInfo;
673 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
674 TEST(res == VK_SUCCESS);
675}
676
677void AllocInfo::Destroy()
678{
679 if(m_Image)
680 {
681 vkDestroyImage(g_hDevice, m_Image, nullptr);
682 }
683 if(m_Buffer)
684 {
685 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
686 }
687 if(m_Allocation)
688 {
689 vmaFreeMemory(g_hAllocator, m_Allocation);
690 }
691}
692
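// Collection of persistently mapped, host-visible staging buffers that are reused between transfers,
// with total size capped at MAX_TOTAL_SIZE.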
Adam Sawickif2975342018-10-16 13:49:02 +0200693class StagingBufferCollection
694{
695public:
696 StagingBufferCollection() { }
697 ~StagingBufferCollection();
698 // Returns false if maximum total size of buffers would be exceeded.
699 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
700 void ReleaseAllBuffers();
701
702private:
703 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
704 struct BufInfo
705 {
706 VmaAllocation Allocation = VK_NULL_HANDLE;
707 VkBuffer Buffer = VK_NULL_HANDLE;
708 VkDeviceSize Size = VK_WHOLE_SIZE;
709 void* MappedPtr = nullptr;
710 bool Used = false;
711 };
712 std::vector<BufInfo> m_Bufs;
713 // Including both used and unused.
714 VkDeviceSize m_TotalSize = 0;
715};
716
717StagingBufferCollection::~StagingBufferCollection()
718{
719 for(size_t i = m_Bufs.size(); i--; )
720 {
721 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
722 }
723}
724
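// Strategy: reuse the smallest unused buffer that is large enough; otherwise create a new one if it fits
// in the budget; otherwise free all unused buffers and retry. Returns false only when even that fails.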
725bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
726{
727 assert(size <= MAX_TOTAL_SIZE);
728
 729 // Try to find an existing unused buffer with the best (smallest sufficient) size.
730 size_t bestIndex = SIZE_MAX;
731 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
732 {
733 BufInfo& currBufInfo = m_Bufs[i];
734 if(!currBufInfo.Used && currBufInfo.Size >= size &&
735 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
736 {
737 bestIndex = i;
738 }
739 }
740
741 if(bestIndex != SIZE_MAX)
742 {
743 m_Bufs[bestIndex].Used = true;
744 outBuffer = m_Bufs[bestIndex].Buffer;
745 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
746 return true;
747 }
748
749 // Allocate new buffer with requested size.
750 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
751 {
752 BufInfo bufInfo;
753 bufInfo.Size = size;
754 bufInfo.Used = true;
755
756 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
757 bufCreateInfo.size = size;
758 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
759
760 VmaAllocationCreateInfo allocCreateInfo = {};
761 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
762 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
763
764 VmaAllocationInfo allocInfo;
765 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
766 bufInfo.MappedPtr = allocInfo.pMappedData;
767 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
768
769 outBuffer = bufInfo.Buffer;
770 outMappedPtr = bufInfo.MappedPtr;
771
772 m_Bufs.push_back(std::move(bufInfo));
773
774 m_TotalSize += size;
775
776 return true;
777 }
778
 779 // If there are unused buffers, they must be too small for this request: free them and try again.
780 bool hasUnused = false;
781 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
782 {
783 if(!m_Bufs[i].Used)
784 {
785 hasUnused = true;
786 break;
787 }
788 }
789 if(hasUnused)
790 {
791 for(size_t i = m_Bufs.size(); i--; )
792 {
793 if(!m_Bufs[i].Used)
794 {
795 m_TotalSize -= m_Bufs[i].Size;
796 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
797 m_Bufs.erase(m_Bufs.begin() + i);
798 }
799 }
800
801 return AcquireBuffer(size, outBuffer, outMappedPtr);
802 }
803
804 return false;
805}
806
807void StagingBufferCollection::ReleaseAllBuffers()
808{
809 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
810 {
811 m_Bufs[i].Used = false;
812 }
813}
814
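// Fills every destination buffer with a deterministic sequence of uint32_t values starting at m_StartValue,
// writing through staging buffers and recording vkCmdCopyBuffer into the shared single-time command buffer.
// Images are not supported here.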
815static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
816{
817 StagingBufferCollection stagingBufs;
818
819 bool cmdBufferStarted = false;
820 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
821 {
822 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
823 if(currAllocInfo.m_Buffer)
824 {
825 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
826
827 VkBuffer stagingBuf = VK_NULL_HANDLE;
828 void* stagingBufMappedPtr = nullptr;
829 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
830 {
831 TEST(cmdBufferStarted);
832 EndSingleTimeCommands();
833 stagingBufs.ReleaseAllBuffers();
834 cmdBufferStarted = false;
835
836 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
837 TEST(ok);
838 }
839
840 // Fill staging buffer.
841 {
842 assert(size % sizeof(uint32_t) == 0);
843 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
844 uint32_t val = currAllocInfo.m_StartValue;
845 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
846 {
847 *stagingValPtr = val;
848 ++stagingValPtr;
849 ++val;
850 }
851 }
852
853 // Issue copy command from staging buffer to destination buffer.
854 if(!cmdBufferStarted)
855 {
856 cmdBufferStarted = true;
857 BeginSingleTimeCommands();
858 }
859
860 VkBufferCopy copy = {};
861 copy.srcOffset = 0;
862 copy.dstOffset = 0;
863 copy.size = size;
864 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
865 }
866 else
867 {
868 TEST(0 && "Images not currently supported.");
869 }
870 }
871
872 if(cmdBufferStarted)
873 {
874 EndSingleTimeCommands();
875 stagingBufs.ReleaseAllBuffers();
876 }
877}
878
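// Counterpart of UploadGpuData: copies every buffer back into staging memory and verifies that the
// deterministic value pattern is intact.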
879static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
880{
881 StagingBufferCollection stagingBufs;
882
883 bool cmdBufferStarted = false;
884 size_t validateAllocIndexOffset = 0;
885 std::vector<void*> validateStagingBuffers;
886 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
887 {
888 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
889 if(currAllocInfo.m_Buffer)
890 {
891 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
892
893 VkBuffer stagingBuf = VK_NULL_HANDLE;
894 void* stagingBufMappedPtr = nullptr;
895 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
896 {
897 TEST(cmdBufferStarted);
898 EndSingleTimeCommands();
899 cmdBufferStarted = false;
900
901 for(size_t validateIndex = 0;
902 validateIndex < validateStagingBuffers.size();
903 ++validateIndex)
904 {
905 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
906 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
907 TEST(validateSize % sizeof(uint32_t) == 0);
908 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
909 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
910 bool valid = true;
911 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
912 {
913 if(*stagingValPtr != val)
914 {
915 valid = false;
916 break;
917 }
918 ++stagingValPtr;
919 ++val;
920 }
921 TEST(valid);
922 }
923
924 stagingBufs.ReleaseAllBuffers();
925
926 validateAllocIndexOffset = allocInfoIndex;
927 validateStagingBuffers.clear();
928
929 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
930 TEST(ok);
931 }
932
 933 // Issue copy command from the destination buffer back to the staging buffer.
934 if(!cmdBufferStarted)
935 {
936 cmdBufferStarted = true;
937 BeginSingleTimeCommands();
938 }
939
940 VkBufferCopy copy = {};
941 copy.srcOffset = 0;
942 copy.dstOffset = 0;
943 copy.size = size;
944 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
945
 946 // Save mapped pointer for later validation.
947 validateStagingBuffers.push_back(stagingBufMappedPtr);
948 }
949 else
950 {
951 TEST(0 && "Images not currently supported.");
952 }
953 }
954
955 if(cmdBufferStarted)
956 {
957 EndSingleTimeCommands();
958
959 for(size_t validateIndex = 0;
960 validateIndex < validateStagingBuffers.size();
961 ++validateIndex)
962 {
963 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
964 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
965 TEST(validateSize % sizeof(uint32_t) == 0);
966 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
967 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
968 bool valid = true;
969 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
970 {
971 if(*stagingValPtr != val)
972 {
973 valid = false;
974 break;
975 }
976 ++stagingValPtr;
977 ++val;
978 }
979 TEST(valid);
980 }
981
982 stagingBufs.ReleaseAllBuffers();
983 }
984}
985
Adam Sawickib8333fb2018-03-13 16:15:53 +0100986static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
987{
988 outMemReq = {};
989 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
990 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
991}
992
993static void CreateBuffer(
994 VmaPool pool,
995 const VkBufferCreateInfo& bufCreateInfo,
996 bool persistentlyMapped,
997 AllocInfo& outAllocInfo)
998{
999 outAllocInfo = {};
1000 outAllocInfo.m_BufferInfo = bufCreateInfo;
1001
1002 VmaAllocationCreateInfo allocCreateInfo = {};
1003 allocCreateInfo.pool = pool;
1004 if(persistentlyMapped)
1005 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1006
1007 VmaAllocationInfo vmaAllocInfo = {};
1008 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1009
 1010 // Set up StartValue and fill the buffer with it.
1011 {
1012 outAllocInfo.m_StartValue = (uint32_t)rand();
1013 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001014 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001015 if(!persistentlyMapped)
1016 {
1017 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1018 }
1019
1020 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001021 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001022 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1023 data[i] = value++;
1024
1025 if(!persistentlyMapped)
1026 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1027 }
1028}
1029
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001030static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001031{
1032 outAllocation.m_Allocation = nullptr;
1033 outAllocation.m_Buffer = nullptr;
1034 outAllocation.m_Image = nullptr;
1035 outAllocation.m_StartValue = (uint32_t)rand();
1036
1037 VmaAllocationCreateInfo vmaMemReq;
1038 GetMemReq(vmaMemReq);
1039
1040 VmaAllocationInfo allocInfo;
1041
1042 const bool isBuffer = true;//(rand() & 0x1) != 0;
1043 const bool isLarge = (rand() % 16) == 0;
1044 if(isBuffer)
1045 {
1046 const uint32_t bufferSize = isLarge ?
1047 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1048 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1049
1050 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1051 bufferInfo.size = bufferSize;
1052 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1053
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001054 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001055 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001056 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001057 }
1058 else
1059 {
1060 const uint32_t imageSizeX = isLarge ?
1061 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1062 rand() % 1024 + 1; // 1 ... 1024
1063 const uint32_t imageSizeY = isLarge ?
1064 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1065 rand() % 1024 + 1; // 1 ... 1024
1066
1067 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1068 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1069 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1070 imageInfo.extent.width = imageSizeX;
1071 imageInfo.extent.height = imageSizeY;
1072 imageInfo.extent.depth = 1;
1073 imageInfo.mipLevels = 1;
1074 imageInfo.arrayLayers = 1;
1075 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1076 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1077 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1078 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1079
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001080 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001081 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001082 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001083 }
1084
1085 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1086 if(allocInfo.pMappedData == nullptr)
1087 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001088 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001089 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001090 }
1091
1092 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001093 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001094 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1095 data[i] = value++;
1096
1097 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001098 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001099}
1100
1101static void DestroyAllocation(const AllocInfo& allocation)
1102{
1103 if(allocation.m_Buffer)
1104 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1105 else
1106 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1107}
1108
1109static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1110{
1111 for(size_t i = allocations.size(); i--; )
1112 DestroyAllocation(allocations[i]);
1113 allocations.clear();
1114}
1115
1116static void ValidateAllocationData(const AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1122 if(allocInfo.pMappedData == nullptr)
1123 {
1124 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001126 }
1127
1128 uint32_t value = allocation.m_StartValue;
1129 bool ok = true;
1130 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001131 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001132 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1133 {
1134 if(data[i] != value++)
1135 {
1136 ok = false;
1137 break;
1138 }
1139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001140 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001141
1142 if(allocInfo.pMappedData == nullptr)
1143 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1144}
1145
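// After defragmentation moves an allocation, the old buffer/image still refers to the old memory.
// Destroy it, recreate it from the stored create-info, and bind it at the allocation's new
// deviceMemory + offset.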
1146static void RecreateAllocationResource(AllocInfo& allocation)
1147{
1148 VmaAllocationInfo allocInfo;
1149 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1150
1151 if(allocation.m_Buffer)
1152 {
1153 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);
1154
1155 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001156 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001157
1158 // Just to silence validation layer warnings.
1159 VkMemoryRequirements vkMemReq;
1160 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001161 TEST(vkMemReq.size == allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001162
1163 res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001164 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001165 }
1166 else
1167 {
1168 vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);
1169
1170 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001171 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001172
1173 // Just to silence validation layer warnings.
1174 VkMemoryRequirements vkMemReq;
1175 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1176
1177 res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001178 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001179 }
1180}
1181
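// Thin wrapper over vmaDefragment: gathers the VmaAllocation handles, runs defragmentation, and recreates
// the buffer/image of every allocation reported as changed.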
1182static void Defragment(AllocInfo* allocs, size_t allocCount,
1183 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1184 VmaDefragmentationStats* defragmentationStats = nullptr)
1185{
1186 std::vector<VmaAllocation> vmaAllocs(allocCount);
1187 for(size_t i = 0; i < allocCount; ++i)
1188 vmaAllocs[i] = allocs[i].m_Allocation;
1189
1190 std::vector<VkBool32> allocChanged(allocCount);
1191
1192 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1193 defragmentationInfo, defragmentationStats) );
1194
1195 for(size_t i = 0; i < allocCount; ++i)
1196 {
1197 if(allocChanged[i])
1198 {
1199 RecreateAllocationResource(allocs[i]);
1200 }
1201 }
1202}
1203
1204static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1205{
1206 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1207 ValidateAllocationData(allocInfo);
1208 });
1209}
1210
1211void TestDefragmentationSimple()
1212{
1213 wprintf(L"Test defragmentation simple\n");
1214
1215 RandomNumberGenerator rand(667);
1216
1217 const VkDeviceSize BUF_SIZE = 0x10000;
1218 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1219
1220 const VkDeviceSize MIN_BUF_SIZE = 32;
1221 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1222 auto RandomBufSize = [&]() -> VkDeviceSize {
1223 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1224 };
1225
1226 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1227 bufCreateInfo.size = BUF_SIZE;
1228 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1229
1230 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1231 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1232
1233 uint32_t memTypeIndex = UINT32_MAX;
1234 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1235
1236 VmaPoolCreateInfo poolCreateInfo = {};
1237 poolCreateInfo.blockSize = BLOCK_SIZE;
1238 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1239
1240 VmaPool pool;
1241 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1242
1243 std::vector<AllocInfo> allocations;
1244
1245 // persistentlyMappedOption = 0 - not persistently mapped.
1246 // persistentlyMappedOption = 1 - persistently mapped.
1247 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1248 {
1249 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1250 const bool persistentlyMapped = persistentlyMappedOption != 0;
1251
1252 // # Test 1
1253 // Buffers of fixed size.
1254 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1255 // Expected result: at least 1 block freed.
1256 {
1257 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1258 {
1259 AllocInfo allocInfo;
1260 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1261 allocations.push_back(allocInfo);
1262 }
1263
1264 for(size_t i = 1; i < allocations.size(); ++i)
1265 {
1266 DestroyAllocation(allocations[i]);
1267 allocations.erase(allocations.begin() + i);
1268 }
1269
1270 VmaDefragmentationStats defragStats;
1271 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1273 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001274
1275 ValidateAllocationsData(allocations.data(), allocations.size());
1276
1277 DestroyAllAllocations(allocations);
1278 }
1279
1280 // # Test 2
1281 // Buffers of fixed size.
 1282 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
 1283 // Expected result: Each of 4 iterations makes some progress.
1284 {
1285 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1286 {
1287 AllocInfo allocInfo;
1288 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1289 allocations.push_back(allocInfo);
1290 }
1291
1292 for(size_t i = 1; i < allocations.size(); ++i)
1293 {
1294 DestroyAllocation(allocations[i]);
1295 allocations.erase(allocations.begin() + i);
1296 }
1297
1298 VmaDefragmentationInfo defragInfo = {};
1299 defragInfo.maxAllocationsToMove = 1;
1300 defragInfo.maxBytesToMove = BUF_SIZE;
1301
1302 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1303 {
1304 VmaDefragmentationStats defragStats;
1305 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001306 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001307 }
1308
1309 ValidateAllocationsData(allocations.data(), allocations.size());
1310
1311 DestroyAllAllocations(allocations);
1312 }
1313
1314 // # Test 3
1315 // Buffers of variable size.
1316 // Create a number of buffers. Remove some percent of them.
1317 // Defragment while having some percent of them unmovable.
1318 // Expected result: Just simple validation.
1319 {
1320 for(size_t i = 0; i < 100; ++i)
1321 {
1322 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1323 localBufCreateInfo.size = RandomBufSize();
1324
1325 AllocInfo allocInfo;
 1326 CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
1327 allocations.push_back(allocInfo);
1328 }
1329
1330 const uint32_t percentToDelete = 60;
1331 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1332 for(size_t i = 0; i < numberToDelete; ++i)
1333 {
1334 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1335 DestroyAllocation(allocations[indexToDelete]);
1336 allocations.erase(allocations.begin() + indexToDelete);
1337 }
1338
1339 // Non-movable allocations will be at the beginning of allocations array.
1340 const uint32_t percentNonMovable = 20;
1341 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1342 for(size_t i = 0; i < numberNonMovable; ++i)
1343 {
1344 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1345 if(indexNonMovable != i)
1346 std::swap(allocations[i], allocations[indexNonMovable]);
1347 }
1348
1349 VmaDefragmentationStats defragStats;
1350 Defragment(
1351 allocations.data() + numberNonMovable,
1352 allocations.size() - numberNonMovable,
1353 nullptr, &defragStats);
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359 }
1360
1361 vmaDestroyPool(g_hAllocator, pool);
1362}
1363
Adam Sawicki52076eb2018-11-22 16:14:50 +01001364void TestDefragmentationWholePool()
1365{
1366 wprintf(L"Test defragmentation whole pool\n");
1367
1368 RandomNumberGenerator rand(668);
1369
1370 const VkDeviceSize BUF_SIZE = 0x10000;
1371 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1372
1373 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1374 bufCreateInfo.size = BUF_SIZE;
1375 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1376
1377 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1378 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1379
1380 uint32_t memTypeIndex = UINT32_MAX;
1381 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1382
1383 VmaPoolCreateInfo poolCreateInfo = {};
1384 poolCreateInfo.blockSize = BLOCK_SIZE;
1385 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1386
1387 VmaDefragmentationStats defragStats[2];
1388 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1389 {
1390 VmaPool pool;
1391 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1392
1393 std::vector<AllocInfo> allocations;
1394
1395 // Buffers of fixed size.
1396 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1397 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1398 {
1399 AllocInfo allocInfo;
1400 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1401 allocations.push_back(allocInfo);
1402 }
1403
1404 for(size_t i = 1; i < allocations.size(); ++i)
1405 {
1406 DestroyAllocation(allocations[i]);
1407 allocations.erase(allocations.begin() + i);
1408 }
1409
1410 VmaDefragmentationInfo2 defragInfo = {};
1411 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1412 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1413 std::vector<VmaAllocation> allocationsToDefrag;
1414 if(caseIndex == 0)
1415 {
1416 defragInfo.poolCount = 1;
1417 defragInfo.pPools = &pool;
1418 }
1419 else
1420 {
1421 const size_t allocCount = allocations.size();
1422 allocationsToDefrag.resize(allocCount);
1423 std::transform(
1424 allocations.begin(), allocations.end(),
1425 allocationsToDefrag.begin(),
1426 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1427 defragInfo.allocationCount = (uint32_t)allocCount;
1428 defragInfo.pAllocations = allocationsToDefrag.data();
1429 }
1430
1431 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1432 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1433 TEST(res >= VK_SUCCESS);
1434 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1435
1436 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1437
1438 ValidateAllocationsData(allocations.data(), allocations.size());
1439
1440 DestroyAllAllocations(allocations);
1441
1442 vmaDestroyPool(g_hAllocator, pool);
1443 }
1444
1445 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1446 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1447 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1448 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1449}
1450
Adam Sawickib8333fb2018-03-13 16:15:53 +01001451void TestDefragmentationFull()
1452{
1453 std::vector<AllocInfo> allocations;
1454
1455 // Create initial allocations.
1456 for(size_t i = 0; i < 400; ++i)
1457 {
1458 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001459 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001460 allocations.push_back(allocation);
1461 }
1462
1463 // Delete random allocations
1464 const size_t allocationsToDeletePercent = 80;
1465 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1466 for(size_t i = 0; i < allocationsToDelete; ++i)
1467 {
1468 size_t index = (size_t)rand() % allocations.size();
1469 DestroyAllocation(allocations[index]);
1470 allocations.erase(allocations.begin() + index);
1471 }
1472
1473 for(size_t i = 0; i < allocations.size(); ++i)
1474 ValidateAllocationData(allocations[i]);
1475
Adam Sawicki0667e332018-08-24 17:26:44 +02001476 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001477
1478 {
1479 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1480 for(size_t i = 0; i < allocations.size(); ++i)
1481 vmaAllocations[i] = allocations[i].m_Allocation;
1482
1483 const size_t nonMovablePercent = 0;
1484 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1485 for(size_t i = 0; i < nonMovableCount; ++i)
1486 {
1487 size_t index = (size_t)rand() % vmaAllocations.size();
1488 vmaAllocations.erase(vmaAllocations.begin() + index);
1489 }
1490
1491 const uint32_t defragCount = 1;
1492 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1493 {
1494 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1495
1496 VmaDefragmentationInfo defragmentationInfo;
1497 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1498 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1499
1500 wprintf(L"Defragmentation #%u\n", defragIndex);
1501
1502 time_point begTime = std::chrono::high_resolution_clock::now();
1503
1504 VmaDefragmentationStats stats;
1505 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001506 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001507
1508 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1509
1510 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1511 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1512 wprintf(L"Time: %.2f s\n", defragmentDuration);
1513
1514 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1515 {
1516 if(allocationsChanged[i])
1517 {
1518 RecreateAllocationResource(allocations[i]);
1519 }
1520 }
1521
1522 for(size_t i = 0; i < allocations.size(); ++i)
1523 ValidateAllocationData(allocations[i]);
1524
Adam Sawicki0667e332018-08-24 17:26:44 +02001525 //wchar_t fileName[MAX_PATH];
1526 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1527 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001528 }
1529 }
1530
1531 // Destroy all remaining allocations.
1532 DestroyAllAllocations(allocations);
1533}
1534
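// Creates many GPU-only buffers, destroys most of them to fragment the blocks, then defragments the movable
// ones on the GPU via vmaDefragmentationBegin with a command buffer, and finally validates buffer contents.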
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001535static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001536{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001537 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001538 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001539
1540 std::vector<AllocInfo> allocations;
1541
 1542 // Create enough allocations to reliably fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001543 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1544 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001545 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001546 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1547 const size_t percentToLeave = 30;
1548 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001549 RandomNumberGenerator rand = { 234522 };
1550
1551 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001552
1553 VmaAllocationCreateInfo allocCreateInfo = {};
1554 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001555 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001556
1557 // Create all intended buffers.
1558 for(size_t i = 0; i < bufCount; ++i)
1559 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001560 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1561
1562 if(rand.Generate() % 100 < percentNonMovable)
1563 {
1564 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1565 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1566 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1567 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1568 }
1569 else
1570 {
1571 // Different usage just to see different color in output from VmaDumpVis.
1572 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1573 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1574 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1575 // And in JSON dump.
1576 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1577 }
1578
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001579 AllocInfo alloc;
1580 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1581 alloc.m_StartValue = rand.Generate();
1582 allocations.push_back(alloc);
1583 }
1584
1585 // Destroy some percentage of them.
1586 {
1587 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1588 for(size_t i = 0; i < buffersToDestroy; ++i)
1589 {
1590 const size_t index = rand.Generate() % allocations.size();
1591 allocations[index].Destroy();
1592 allocations.erase(allocations.begin() + index);
1593 }
1594 }
1595
1596 // Fill them with meaningful data.
1597 UploadGpuData(allocations.data(), allocations.size());
1598
Adam Sawickic6ede152018-11-16 17:04:14 +01001599 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001600 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001601 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001602
1603 // Defragment using GPU only.
1604 {
1605 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001606
Adam Sawickic6ede152018-11-16 17:04:14 +01001607 std::vector<VmaAllocation> allocationPtrs;
1608 std::vector<VkBool32> allocationChanged;
1609 std::vector<size_t> allocationOriginalIndex;
1610
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001611 for(size_t i = 0; i < allocCount; ++i)
1612 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001613 VmaAllocationInfo allocInfo = {};
1614 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1615 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1616 {
1617 allocationPtrs.push_back(allocations[i].m_Allocation);
1618 allocationChanged.push_back(VK_FALSE);
1619 allocationOriginalIndex.push_back(i);
1620 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001621 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001622
1623 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001624
1625 BeginSingleTimeCommands();
1626
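        // The defragmentation copy commands are recorded into the temporary command buffer between
        // BeginSingleTimeCommands() and EndSingleTimeCommands(); the GPU move limits are left
        // unlimited (VK_WHOLE_SIZE / UINT32_MAX) so everything movable can be relocated in one pass.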
1627 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001628 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001629 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001630 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001631 defragInfo.pAllocationsChanged = allocationChanged.data();
1632 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001633 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1634 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1635
1636 VmaDefragmentationStats stats = {};
1637 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1638 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1639 TEST(res >= VK_SUCCESS);
1640
1641 EndSingleTimeCommands();
1642
1643 vmaDefragmentationEnd(g_hAllocator, ctx);
1644
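        // Defragmentation moves memory contents only, so buffers bound to moved allocations are
        // destroyed and recreated against the new memory; RecreateAllocationResource() below does
        // this for every allocation reported as changed.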
Adam Sawickic6ede152018-11-16 17:04:14 +01001645 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001646 {
1647 if(allocationChanged[i])
1648 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001649 const size_t origAllocIndex = allocationOriginalIndex[i];
1650 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001651 }
1652 }
1653
Adam Sawicki440307e2018-10-18 15:05:19 +02001654 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1655 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001656 }
1657
1658 ValidateGpuData(allocations.data(), allocations.size());
1659
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001660 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001661 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001662
1663 // Destroy all remaining buffers.
1664 for(size_t i = allocations.size(); i--; )
1665 {
1666 allocations[i].Destroy();
1667 }
Adam Sawicki05704002018-11-08 16:07:29 +01001668
1669 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001670}
1671
Adam Sawickib8333fb2018-03-13 16:15:53 +01001672static void TestUserData()
1673{
1674 VkResult res;
1675
1676 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1677 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1678 bufCreateInfo.size = 0x10000;
1679
1680 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1681 {
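        // testIndex == 0 exercises allocations placed inside memory blocks;
        // testIndex == 1 repeats the same checks with VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.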
1682 // Opaque pointer
1683 {
1684
1685 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1686 void* pointerToSomething = &res;
1687
1688 VmaAllocationCreateInfo allocCreateInfo = {};
1689 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1690 allocCreateInfo.pUserData = numberAsPointer;
1691 if(testIndex == 1)
1692 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1693
1694 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1695 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001696 TEST(res == VK_SUCCESS);
1697 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001698
1699 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001700 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001701
1702 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1703 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001704 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001705
1706 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1707 }
1708
1709 // String
1710 {
1711 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1712 const char* name2 = "2";
1713 const size_t name1Len = strlen(name1);
1714
1715 char* name1Buf = new char[name1Len + 1];
1716 strcpy_s(name1Buf, name1Len + 1, name1);
1717
1718 VmaAllocationCreateInfo allocCreateInfo = {};
1719 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1720 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1721 allocCreateInfo.pUserData = name1Buf;
1722 if(testIndex == 1)
1723 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1724
1725 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1726 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001727 TEST(res == VK_SUCCESS);
1728 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1729 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001730
1731 delete[] name1Buf;
1732
1733 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001734 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001735
1736 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1737 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001738 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001739
1740 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1741 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001742 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001743
1744 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1745 }
1746 }
1747}
1748
Adam Sawicki370ab182018-11-08 16:31:00 +01001749static void TestInvalidAllocations()
1750{
1751 VkResult res;
1752
1753 VmaAllocationCreateInfo allocCreateInfo = {};
1754 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1755
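    // Each case below passes deliberately invalid parameters and expects the call to fail
    // cleanly with VK_ERROR_VALIDATION_FAILED_EXT instead of crashing or returning handles.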
1756 // Try to allocate 0 bytes.
1757 {
1758 VkMemoryRequirements memReq = {};
1759 memReq.size = 0; // !!!
1760 memReq.alignment = 4;
1761 memReq.memoryTypeBits = UINT32_MAX;
1762 VmaAllocation alloc = VK_NULL_HANDLE;
1763 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1764 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1765 }
1766
1767 // Try to create buffer with size = 0.
1768 {
1769 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1770 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1771 bufCreateInfo.size = 0; // !!!
1772 VkBuffer buf = VK_NULL_HANDLE;
1773 VmaAllocation alloc = VK_NULL_HANDLE;
1774 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1775 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1776 }
1777
1778 // Try to create image with one dimension = 0.
1779 {
1780 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1781 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1782 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1783 imageCreateInfo.extent.width = 128;
1784 imageCreateInfo.extent.height = 0; // !!!
1785 imageCreateInfo.extent.depth = 1;
1786 imageCreateInfo.mipLevels = 1;
1787 imageCreateInfo.arrayLayers = 1;
1788 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1789 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1790 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1791 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1792 VkImage image = VK_NULL_HANDLE;
1793 VmaAllocation alloc = VK_NULL_HANDLE;
1794 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1795 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1796 }
1797}
1798
Adam Sawickib8333fb2018-03-13 16:15:53 +01001799static void TestMemoryRequirements()
1800{
1801 VkResult res;
1802 VkBuffer buf;
1803 VmaAllocation alloc;
1804 VmaAllocationInfo allocInfo;
1805
1806 const VkPhysicalDeviceMemoryProperties* memProps;
1807 vmaGetMemoryProperties(g_hAllocator, &memProps);
1808
1809 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1810 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1811 bufInfo.size = 128;
1812
1813 VmaAllocationCreateInfo allocCreateInfo = {};
1814
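    // The cases below exercise the different ways of steering memory type selection:
    // usage only, required + preferred flags, and an explicit memoryTypeBits mask.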
1815 // No requirements.
1816 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001817 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001818 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1819
1820 // Usage.
1821 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1822 allocCreateInfo.requiredFlags = 0;
1823 allocCreateInfo.preferredFlags = 0;
1824 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1825
1826 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001827 TEST(res == VK_SUCCESS);
1828 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001829 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1830
1831 // Required flags, preferred flags.
1832 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1833 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1834 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1835 allocCreateInfo.memoryTypeBits = 0;
1836
1837 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001838 TEST(res == VK_SUCCESS);
1839 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1840 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001841 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1842
1843 // memoryTypeBits.
1844 const uint32_t memType = allocInfo.memoryType;
1845 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1846 allocCreateInfo.requiredFlags = 0;
1847 allocCreateInfo.preferredFlags = 0;
1848 allocCreateInfo.memoryTypeBits = 1u << memType;
1849
1850 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001851 TEST(res == VK_SUCCESS);
1852 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001853 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1854
1855}
1856
1857static void TestBasics()
1858{
1859 VkResult res;
1860
1861 TestMemoryRequirements();
1862
1863 // Lost allocation
1864 {
1865 VmaAllocation alloc = VK_NULL_HANDLE;
1866 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001867 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001868
1869 VmaAllocationInfo allocInfo;
1870 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001871 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1872 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001873
1874 vmaFreeMemory(g_hAllocator, alloc);
1875 }
1876
1877 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1878 {
1879 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1880 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1881 bufCreateInfo.size = 128;
1882
1883 VmaAllocationCreateInfo allocCreateInfo = {};
1884 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1885 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1886
1887 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1888 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001889 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001890
1891 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1892
1893 // Same with DEDICATED_MEMORY.
1894 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1895
1896 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001897 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001898
1899 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1900 }
1901
1902 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001903
1904 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001905}
1906
1907void TestHeapSizeLimit()
1908{
1909 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1910 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1911
1912 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1913 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1914 {
1915 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1916 }
1917
1918 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1919 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1920 allocatorCreateInfo.device = g_hDevice;
1921 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1922
1923 VmaAllocator hAllocator;
1924 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001925 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001926
1927 struct Item
1928 {
1929 VkBuffer hBuf;
1930 VmaAllocation hAlloc;
1931 };
1932 std::vector<Item> items;
1933
1934 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1935 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1936
1937 // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
1938 VmaAllocationInfo ownAllocInfo;
1939 {
1940 VmaAllocationCreateInfo allocCreateInfo = {};
1941 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1942 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1943
1944 bufCreateInfo.size = BLOCK_SIZE / 2;
1945
1946 for(size_t i = 0; i < 2; ++i)
1947 {
1948 Item item;
1949 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001950 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001951 items.push_back(item);
1952 }
1953 }
1954
1955 // Create a pool to ensure that subsequent allocations come from this memory type.
1956 VmaPoolCreateInfo poolCreateInfo = {};
1957 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1958 poolCreateInfo.blockSize = BLOCK_SIZE;
1959
1960 VmaPool hPool;
1961 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001962 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001963
1964 // 2. Allocate normal buffers from all the remaining memory.
1965 {
1966 VmaAllocationCreateInfo allocCreateInfo = {};
1967 allocCreateInfo.pool = hPool;
1968
1969 bufCreateInfo.size = BLOCK_SIZE / 2;
1970
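        // The two dedicated allocations above already consume one BLOCK_SIZE worth of the
        // 1 GB limit, so (HEAP_SIZE_LIMIT / BLOCK_SIZE - 1) full blocks remain; each block
        // holds two half-block buffers.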
1971 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
1972 for(size_t i = 0; i < bufCount; ++i)
1973 {
1974 Item item;
1975 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001976 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001977 items.push_back(item);
1978 }
1979 }
1980
1981 // 3. Allocation of one more (even small) buffer should fail.
1982 {
1983 VmaAllocationCreateInfo allocCreateInfo = {};
1984 allocCreateInfo.pool = hPool;
1985
1986 bufCreateInfo.size = 128;
1987
1988 VkBuffer hBuf;
1989 VmaAllocation hAlloc;
1990 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001991 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001992 }
1993
1994 // Destroy everything.
1995 for(size_t i = items.size(); i--; )
1996 {
1997 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
1998 }
1999
2000 vmaDestroyPool(hAllocator, hPool);
2001
2002 vmaDestroyAllocator(hAllocator);
2003}
2004
Adam Sawicki212a4a62018-06-14 15:44:45 +02002005#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002006static void TestDebugMargin()
2007{
2008 if(VMA_DEBUG_MARGIN == 0)
2009 {
2010 return;
2011 }
2012
2013 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002014 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002015
2016 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002017 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002018
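    // VMA_DEBUG_MARGIN reserves extra bytes around every allocation. The checks below verify
    // that allocation offsets account for the margin and that vmaCheckCorruption() passes as
    // long as the margins are not overwritten.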
2019 // Create a few buffers of different sizes.
2020 const size_t BUF_COUNT = 10;
2021 BufferInfo buffers[BUF_COUNT];
2022 VmaAllocationInfo allocInfo[BUF_COUNT];
2023 for(size_t i = 0; i < BUF_COUNT; ++i)
2024 {
2025 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002026 // Last one will be mapped.
2027 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002028
2029 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002030 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002031 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002032 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002033
2034 if(i == BUF_COUNT - 1)
2035 {
2036 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002037 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002038 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2039 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2040 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002041 }
2042
2043 // Check that their offsets preserve the margin between them.
2044 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2045 {
2046 if(lhs.deviceMemory != rhs.deviceMemory)
2047 {
2048 return lhs.deviceMemory < rhs.deviceMemory;
2049 }
2050 return lhs.offset < rhs.offset;
2051 });
2052 for(size_t i = 1; i < BUF_COUNT; ++i)
2053 {
2054 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2055 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002056 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002057 }
2058 }
2059
Adam Sawicki212a4a62018-06-14 15:44:45 +02002060 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002061 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002062
Adam Sawicki73b16652018-06-11 16:39:25 +02002063 // Destroy all buffers.
2064 for(size_t i = BUF_COUNT; i--; )
2065 {
2066 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2067 }
2068}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002069#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002070
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002071static void TestLinearAllocator()
2072{
2073 wprintf(L"Test linear allocator\n");
2074
2075 RandomNumberGenerator rand{645332};
2076
2077 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2078 sampleBufCreateInfo.size = 1024; // Whatever.
2079 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2080
2081 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2082 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2083
2084 VmaPoolCreateInfo poolCreateInfo = {};
2085 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002086 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002087
Adam Sawickiee082772018-06-20 17:45:49 +02002088 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002089 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2090 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2091
2092 VmaPool pool = nullptr;
2093 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002094 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002095
2096 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2097
2098 VmaAllocationCreateInfo allocCreateInfo = {};
2099 allocCreateInfo.pool = pool;
2100
2101 constexpr size_t maxBufCount = 100;
2102 std::vector<BufferInfo> bufInfo;
2103
2104 constexpr VkDeviceSize bufSizeMin = 16;
2105 constexpr VkDeviceSize bufSizeMax = 1024;
2106 VmaAllocationInfo allocInfo;
2107 VkDeviceSize prevOffset = 0;
2108
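    // The linear pool is exercised in several modes below: one-time free, stack (LIFO),
    // ring buffer (FIFO), double stack (allocations from both ends), and finally a ring
    // buffer combined with lost allocations.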
2109 // Test one-time free.
2110 for(size_t i = 0; i < 2; ++i)
2111 {
2112 // Allocate a number of buffers of varying sizes that surely fit into this block.
2113 VkDeviceSize bufSumSize = 0;
2114 for(size_t i = 0; i < maxBufCount; ++i)
2115 {
2116 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2117 BufferInfo newBufInfo;
2118 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2119 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002120 TEST(res == VK_SUCCESS);
2121 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002122 bufInfo.push_back(newBufInfo);
2123 prevOffset = allocInfo.offset;
2124 bufSumSize += bufCreateInfo.size;
2125 }
2126
2127 // Validate pool stats.
2128 VmaPoolStats stats;
2129 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002130 TEST(stats.size == poolCreateInfo.blockSize);
2131 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2132 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002133
2134 // Destroy the buffers in random order.
2135 while(!bufInfo.empty())
2136 {
2137 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2138 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2139 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2140 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2141 }
2142 }
2143
2144 // Test stack.
2145 {
2146 // Allocate a number of buffers of varying sizes that surely fit into this block.
2147 for(size_t i = 0; i < maxBufCount; ++i)
2148 {
2149 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2150 BufferInfo newBufInfo;
2151 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2152 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002153 TEST(res == VK_SUCCESS);
2154 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002155 bufInfo.push_back(newBufInfo);
2156 prevOffset = allocInfo.offset;
2157 }
2158
2159 // Destroy a few buffers from the top of the stack.
2160 for(size_t i = 0; i < maxBufCount / 5; ++i)
2161 {
2162 const BufferInfo& currBufInfo = bufInfo.back();
2163 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2164 bufInfo.pop_back();
2165 }
2166
2167 // Create some more
2168 for(size_t i = 0; i < maxBufCount / 5; ++i)
2169 {
2170 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2171 BufferInfo newBufInfo;
2172 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2173 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002174 TEST(res == VK_SUCCESS);
2175 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002176 bufInfo.push_back(newBufInfo);
2177 prevOffset = allocInfo.offset;
2178 }
2179
2180 // Destroy the buffers in reverse order.
2181 while(!bufInfo.empty())
2182 {
2183 const BufferInfo& currBufInfo = bufInfo.back();
2184 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2185 bufInfo.pop_back();
2186 }
2187 }
2188
Adam Sawickiee082772018-06-20 17:45:49 +02002189 // Test ring buffer.
2190 {
2191 // Allocate a number of buffers that surely fit into this block.
2192 bufCreateInfo.size = bufSizeMax;
2193 for(size_t i = 0; i < maxBufCount; ++i)
2194 {
2195 BufferInfo newBufInfo;
2196 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2197 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002198 TEST(res == VK_SUCCESS);
2199 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002200 bufInfo.push_back(newBufInfo);
2201 prevOffset = allocInfo.offset;
2202 }
2203
2204 // Free and allocate new buffers so many times that we are sure to wrap around at least once.
2205 const size_t buffersPerIter = maxBufCount / 10 - 1;
2206 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2207 for(size_t iter = 0; iter < iterCount; ++iter)
2208 {
2209 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2210 {
2211 const BufferInfo& currBufInfo = bufInfo.front();
2212 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2213 bufInfo.erase(bufInfo.begin());
2214 }
2215 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2216 {
2217 BufferInfo newBufInfo;
2218 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2219 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002220 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002221 bufInfo.push_back(newBufInfo);
2222 }
2223 }
2224
2225 // Allocate buffers until we run out of memory.
2226 uint32_t debugIndex = 0;
2227 while(res == VK_SUCCESS)
2228 {
2229 BufferInfo newBufInfo;
2230 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2231 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2232 if(res == VK_SUCCESS)
2233 {
2234 bufInfo.push_back(newBufInfo);
2235 }
2236 else
2237 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002238 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002239 }
2240 ++debugIndex;
2241 }
2242
2243 // Destroy the buffers in random order.
2244 while(!bufInfo.empty())
2245 {
2246 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2247 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2248 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2249 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2250 }
2251 }
2252
Adam Sawicki680b2252018-08-22 14:47:32 +02002253 // Test double stack.
2254 {
2255 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2256 VkDeviceSize prevOffsetLower = 0;
2257 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2258 for(size_t i = 0; i < maxBufCount; ++i)
2259 {
2260 const bool upperAddress = (i % 2) != 0;
2261 if(upperAddress)
2262 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2263 else
2264 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2265 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2266 BufferInfo newBufInfo;
2267 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2268 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002269 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002270 if(upperAddress)
2271 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002272 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002273 prevOffsetUpper = allocInfo.offset;
2274 }
2275 else
2276 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002277 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002278 prevOffsetLower = allocInfo.offset;
2279 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002280 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002281 bufInfo.push_back(newBufInfo);
2282 }
2283
2284 // Destroy a few buffers from the top of the stack.
2285 for(size_t i = 0; i < maxBufCount / 5; ++i)
2286 {
2287 const BufferInfo& currBufInfo = bufInfo.back();
2288 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2289 bufInfo.pop_back();
2290 }
2291
2292 // Create some more
2293 for(size_t i = 0; i < maxBufCount / 5; ++i)
2294 {
2295 const bool upperAddress = (i % 2) != 0;
2296 if(upperAddress)
2297 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2298 else
2299 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2300 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2301 BufferInfo newBufInfo;
2302 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2303 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002304 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002305 bufInfo.push_back(newBufInfo);
2306 }
2307
2308 // Destroy the buffers in reverse order.
2309 while(!bufInfo.empty())
2310 {
2311 const BufferInfo& currBufInfo = bufInfo.back();
2312 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2313 bufInfo.pop_back();
2314 }
2315
2316 // Create buffers on both sides until we run out of memory.
2317 prevOffsetLower = 0;
2318 prevOffsetUpper = poolCreateInfo.blockSize;
2319 res = VK_SUCCESS;
2320 for(size_t i = 0; res == VK_SUCCESS; ++i)
2321 {
2322 const bool upperAddress = (i % 2) != 0;
2323 if(upperAddress)
2324 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2325 else
2326 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2327 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2328 BufferInfo newBufInfo;
2329 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2330 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2331 if(res == VK_SUCCESS)
2332 {
2333 if(upperAddress)
2334 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002335 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002336 prevOffsetUpper = allocInfo.offset;
2337 }
2338 else
2339 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002340 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 prevOffsetLower = allocInfo.offset;
2342 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002343 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002344 bufInfo.push_back(newBufInfo);
2345 }
2346 }
2347
2348 // Destroy the buffers in random order.
2349 while(!bufInfo.empty())
2350 {
2351 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2352 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2353 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2354 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2355 }
2356
2357 // Create buffers on the upper side only, of constant size, until we run out of memory.
2358 prevOffsetUpper = poolCreateInfo.blockSize;
2359 res = VK_SUCCESS;
2360 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2361 bufCreateInfo.size = bufSizeMax;
2362 for(size_t i = 0; res == VK_SUCCESS; ++i)
2363 {
2364 BufferInfo newBufInfo;
2365 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2366 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2367 if(res == VK_SUCCESS)
2368 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002369 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002370 prevOffsetUpper = allocInfo.offset;
2371 bufInfo.push_back(newBufInfo);
2372 }
2373 }
2374
2375 // Destroy the buffers in reverse order.
2376 while(!bufInfo.empty())
2377 {
2378 const BufferInfo& currBufInfo = bufInfo.back();
2379 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2380 bufInfo.pop_back();
2381 }
2382 }
2383
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002384 // Test ring buffer with lost allocations.
2385 {
2386 // Allocate a number of buffers until the pool is full.
2387 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2388 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2389 res = VK_SUCCESS;
2390 for(size_t i = 0; res == VK_SUCCESS; ++i)
2391 {
2392 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2393
2394 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2395
2396 BufferInfo newBufInfo;
2397 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2398 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2399 if(res == VK_SUCCESS)
2400 bufInfo.push_back(newBufInfo);
2401 }
2402
2403 // Free first half of it.
2404 {
2405 const size_t buffersToDelete = bufInfo.size() / 2;
2406 for(size_t i = 0; i < buffersToDelete; ++i)
2407 {
2408 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2409 }
2410 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2411 }
2412
2413 // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002414 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002415 res = VK_SUCCESS;
2416 for(size_t i = 0; res == VK_SUCCESS; ++i)
2417 {
2418 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2419
2420 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2421
2422 BufferInfo newBufInfo;
2423 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2424 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2425 if(res == VK_SUCCESS)
2426 bufInfo.push_back(newBufInfo);
2427 }
2428
2429 VkDeviceSize firstNewOffset;
2430 {
2431 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2432
2433 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2434 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2435 bufCreateInfo.size = bufSizeMax;
2436
2437 BufferInfo newBufInfo;
2438 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2439 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002440 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002441 bufInfo.push_back(newBufInfo);
2442 firstNewOffset = allocInfo.offset;
2443
2444 // Make sure at least one buffer from the beginning became lost.
2445 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002446 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002447 }
2448
2449 // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with them.
2450 size_t newCount = 1;
2451 for(;;)
2452 {
2453 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2454
2455 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2456
2457 BufferInfo newBufInfo;
2458 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2459 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002460 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002461 bufInfo.push_back(newBufInfo);
2462 ++newCount;
2463 if(allocInfo.offset < firstNewOffset)
2464 break;
2465 }
2466
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002467 // Delete buffers that are lost.
2468 for(size_t i = bufInfo.size(); i--; )
2469 {
2470 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2471 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2472 {
2473 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2474 bufInfo.erase(bufInfo.begin() + i);
2475 }
2476 }
2477
2478 // Test vmaMakePoolAllocationsLost
2479 {
2480 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2481
2482 size_t lostAllocCount = SIZE_MAX;
2483 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002484 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002485
2486 size_t realLostAllocCount = 0;
2487 for(size_t i = 0; i < bufInfo.size(); ++i)
2488 {
2489 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2490 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2491 ++realLostAllocCount;
2492 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002493 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002494 }
2495
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002496 // Destroy all the buffers in forward order.
2497 for(size_t i = 0; i < bufInfo.size(); ++i)
2498 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2499 bufInfo.clear();
2500 }
2501
Adam Sawicki70a683e2018-08-24 15:36:32 +02002502 vmaDestroyPool(g_hAllocator, pool);
2503}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002504
Adam Sawicki70a683e2018-08-24 15:36:32 +02002505static void TestLinearAllocatorMultiBlock()
2506{
2507 wprintf(L"Test linear allocator multi block\n");
2508
2509 RandomNumberGenerator rand{345673};
2510
2511 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2512 sampleBufCreateInfo.size = 1024 * 1024;
2513 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2514
2515 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2516 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2517
2518 VmaPoolCreateInfo poolCreateInfo = {};
2519 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2520 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002521 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002522
2523 VmaPool pool = nullptr;
2524 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002525 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002526
2527 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2528
2529 VmaAllocationCreateInfo allocCreateInfo = {};
2530 allocCreateInfo.pool = pool;
2531
2532 std::vector<BufferInfo> bufInfo;
2533 VmaAllocationInfo allocInfo;
2534
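    // Unlike TestLinearAllocator, this pool has no explicit block count limit, so the linear
    // algorithm is expected to grow to a second VkDeviceMemory block and shrink back once
    // allocations are freed; the VmaPoolStats::blockCount checks below verify that.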
2535 // Test one-time free.
2536 {
2537 // Allocate buffers until we move to a second block.
2538 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2539 for(uint32_t i = 0; ; ++i)
2540 {
2541 BufferInfo newBufInfo;
2542 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2543 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002544 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002545 bufInfo.push_back(newBufInfo);
2546 if(lastMem && allocInfo.deviceMemory != lastMem)
2547 {
2548 break;
2549 }
2550 lastMem = allocInfo.deviceMemory;
2551 }
2552
Adam Sawickib8d34d52018-10-03 17:41:20 +02002553 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002554
2555 // Make sure that the pool now has two blocks.
2556 VmaPoolStats poolStats = {};
2557 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002558 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002559
2560 // Destroy all the buffers in random order.
2561 while(!bufInfo.empty())
2562 {
2563 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2564 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2565 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2566 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2567 }
2568
2569 // Make sure that the pool now has at most one block.
2570 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002571 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002572 }
2573
2574 // Test stack.
2575 {
2576 // Allocate buffers until we move to a second block.
2577 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2578 for(uint32_t i = 0; ; ++i)
2579 {
2580 BufferInfo newBufInfo;
2581 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2582 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002583 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002584 bufInfo.push_back(newBufInfo);
2585 if(lastMem && allocInfo.deviceMemory != lastMem)
2586 {
2587 break;
2588 }
2589 lastMem = allocInfo.deviceMemory;
2590 }
2591
Adam Sawickib8d34d52018-10-03 17:41:20 +02002592 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002593
2594 // Add a few more buffers.
2595 for(uint32_t i = 0; i < 5; ++i)
2596 {
2597 BufferInfo newBufInfo;
2598 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2599 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002600 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002601 bufInfo.push_back(newBufInfo);
2602 }
2603
2604 // Make sure that the pool now has two blocks.
2605 VmaPoolStats poolStats = {};
2606 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002607 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002608
2609 // Delete half of buffers, LIFO.
2610 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2611 {
2612 const BufferInfo& currBufInfo = bufInfo.back();
2613 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2614 bufInfo.pop_back();
2615 }
2616
2617 // Add one more buffer.
2618 BufferInfo newBufInfo;
2619 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2620 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002621 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002622 bufInfo.push_back(newBufInfo);
2623
2624 // Make sure that the pool now has one block.
2625 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002626 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002627
2628 // Delete all the remaining buffers, LIFO.
2629 while(!bufInfo.empty())
2630 {
2631 const BufferInfo& currBufInfo = bufInfo.back();
2632 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2633 bufInfo.pop_back();
2634 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002635 }
2636
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002637 vmaDestroyPool(g_hAllocator, pool);
2638}
2639
Adam Sawickifd11d752018-08-22 15:02:10 +02002640static void ManuallyTestLinearAllocator()
2641{
2642 VmaStats origStats;
2643 vmaCalculateStats(g_hAllocator, &origStats);
2644
2645 wprintf(L"Manually test linear allocator\n");
2646
2647 RandomNumberGenerator rand{645332};
2648
2649 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2650 sampleBufCreateInfo.size = 1024; // Whatever.
2651 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2652
2653 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2654 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2655
2656 VmaPoolCreateInfo poolCreateInfo = {};
2657 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002658 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002659
2660 poolCreateInfo.blockSize = 10 * 1024;
2661 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2662 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2663
2664 VmaPool pool = nullptr;
2665 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002666 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002667
2668 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2669
2670 VmaAllocationCreateInfo allocCreateInfo = {};
2671 allocCreateInfo.pool = pool;
2672
2673 std::vector<BufferInfo> bufInfo;
2674 VmaAllocationInfo allocInfo;
2675 BufferInfo newBufInfo;
2676
2677 // Test double stack.
2678 {
2679 /*
2680 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2681 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2682
2683 Totally:
2684 1 block allocated
2685 10240 Vulkan bytes
2686 6 new allocations
2687 2256 bytes in allocations
2688 */
2689
2690 bufCreateInfo.size = 32;
2691 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2692 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002693 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002694 bufInfo.push_back(newBufInfo);
2695
2696 bufCreateInfo.size = 1024;
2697 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2698 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002699 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002700 bufInfo.push_back(newBufInfo);
2701
2702 bufCreateInfo.size = 32;
2703 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2704 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002705 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002706 bufInfo.push_back(newBufInfo);
2707
2708 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2709
2710 bufCreateInfo.size = 128;
2711 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2712 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002713 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002714 bufInfo.push_back(newBufInfo);
2715
2716 bufCreateInfo.size = 1024;
2717 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2718 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002719 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002720 bufInfo.push_back(newBufInfo);
2721
2722 bufCreateInfo.size = 16;
2723 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2724 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002725 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002726 bufInfo.push_back(newBufInfo);
2727
2728 VmaStats currStats;
2729 vmaCalculateStats(g_hAllocator, &currStats);
2730 VmaPoolStats poolStats;
2731 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2732
2733 char* statsStr = nullptr;
2734 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2735
2736 // PUT BREAKPOINT HERE TO CHECK.
2737 // Inspect: currStats versus origStats, poolStats, statsStr.
2738 int I = 0;
2739
2740 vmaFreeStatsString(g_hAllocator, statsStr);
2741
2742 // Destroy the buffers in reverse order.
2743 while(!bufInfo.empty())
2744 {
2745 const BufferInfo& currBufInfo = bufInfo.back();
2746 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2747 bufInfo.pop_back();
2748 }
2749 }
2750
2751 vmaDestroyPool(g_hAllocator, pool);
2752}
2753
Adam Sawicki80927152018-09-07 17:27:23 +02002754static void BenchmarkAlgorithmsCase(FILE* file,
2755 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002756 bool empty,
2757 VmaAllocationCreateFlags allocStrategy,
2758 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002759{
2760 RandomNumberGenerator rand{16223};
2761
2762 const VkDeviceSize bufSizeMin = 32;
2763 const VkDeviceSize bufSizeMax = 1024;
2764 const size_t maxBufCapacity = 10000;
2765 const uint32_t iterationCount = 10;
2766
2767 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2768 sampleBufCreateInfo.size = bufSizeMax;
2769 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2770
2771 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2772 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2773
2774 VmaPoolCreateInfo poolCreateInfo = {};
2775 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002776 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002777
2778 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002779 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002780 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2781
2782 VmaPool pool = nullptr;
2783 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002784 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002785
2786 // Buffer created just to get memory requirements. Never bound to any memory.
2787 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2788 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002789 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002790
2791 VkMemoryRequirements memReq = {};
2792 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2793
2794 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2795
2796 VmaAllocationCreateInfo allocCreateInfo = {};
2797 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002798 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002799
2800 VmaAllocation alloc;
2801 std::vector<VmaAllocation> baseAllocations;
2802
2803 if(!empty)
2804 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002805 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002806 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002807 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002808 {
2809 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2810 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002811 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002812 baseAllocations.push_back(alloc);
2813 totalSize += memReq.size;
2814 }
2815
2816 // Delete half of them, chosen randomly.
2817 size_t allocsToDelete = baseAllocations.size() / 2;
2818 for(size_t i = 0; i < allocsToDelete; ++i)
2819 {
2820 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2821 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2822 baseAllocations.erase(baseAllocations.begin() + index);
2823 }
2824 }
2825
2826 // BENCHMARK
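    // Each iteration allocates allocCount allocations of random size, then frees them in the
    // requested order, accumulating wall-clock time separately for allocation and deallocation.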
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002827 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002828 std::vector<VmaAllocation> testAllocations;
2829 testAllocations.reserve(allocCount);
2830 duration allocTotalDuration = duration::zero();
2831 duration freeTotalDuration = duration::zero();
2832 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2833 {
2834 // Allocations
2835 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2836 for(size_t i = 0; i < allocCount; ++i)
2837 {
2838 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2839 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002840 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002841 testAllocations.push_back(alloc);
2842 }
2843 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2844
2845 // Deallocations
2846 switch(freeOrder)
2847 {
2848 case FREE_ORDER::FORWARD:
2849 // Leave testAllocations unchanged.
2850 break;
2851 case FREE_ORDER::BACKWARD:
2852 std::reverse(testAllocations.begin(), testAllocations.end());
2853 break;
2854 case FREE_ORDER::RANDOM:
2855 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2856 break;
2857 default: assert(0);
2858 }
2859
2860 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2861 for(size_t i = 0; i < allocCount; ++i)
2862 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2863 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2864
2865 testAllocations.clear();
2866 }
2867
2868 // Delete baseAllocations
2869 while(!baseAllocations.empty())
2870 {
2871 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2872 baseAllocations.pop_back();
2873 }
2874
2875 vmaDestroyPool(g_hAllocator, pool);
2876
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002877 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2878 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2879
Adam Sawicki80927152018-09-07 17:27:23 +02002880 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2881 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002882 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002883 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002884 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002885 allocTotalSeconds,
2886 freeTotalSeconds);
2887
2888 if(file)
2889 {
2890 std::string currTime;
2891 CurrentTimeToStr(currTime);
2892
Adam Sawicki80927152018-09-07 17:27:23 +02002893 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002894 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002895 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002896 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002897 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002898 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2899 allocTotalSeconds,
2900 freeTotalSeconds);
2901 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002902}
2903
Adam Sawicki80927152018-09-07 17:27:23 +02002904static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002905{
Adam Sawicki80927152018-09-07 17:27:23 +02002906 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002907
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002908 if(file)
2909 {
2910 fprintf(file,
2911 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002912 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002913 "Allocation time (s),Deallocation time (s)\n");
2914 }
2915
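    // Run the Cartesian product of: free order x empty/non-empty pool x algorithm
    // (Default, Buddy, Linear) x allocation strategy. Allocation strategies are varied
    // only for the default algorithm (currAllocStrategyCount is 1 otherwise).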
Adam Sawicki0a607132018-08-24 11:18:41 +02002916 uint32_t freeOrderCount = 1;
2917 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2918 freeOrderCount = 3;
2919 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2920 freeOrderCount = 2;
2921
2922 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002923 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002924
2925 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2926 {
2927 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2928 switch(freeOrderIndex)
2929 {
2930 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2931 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2932 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2933 default: assert(0);
2934 }
2935
2936 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2937 {
Adam Sawicki80927152018-09-07 17:27:23 +02002938 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002939 {
Adam Sawicki80927152018-09-07 17:27:23 +02002940 uint32_t algorithm = 0;
2941 switch(algorithmIndex)
2942 {
2943 case 0:
2944 break;
2945 case 1:
2946 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2947 break;
2948 case 2:
2949 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2950 break;
2951 default:
2952 assert(0);
2953 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002954
Adam Sawicki80927152018-09-07 17:27:23 +02002955 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002956 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2957 {
2958 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002959 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002960 {
2961 switch(allocStrategyIndex)
2962 {
2963 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2964 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2965 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2966 default: assert(0);
2967 }
2968 }
2969
Adam Sawicki80927152018-09-07 17:27:23 +02002970 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002971 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002972 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002973 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002974 strategy,
2975 freeOrder); // freeOrder
2976 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002977 }
2978 }
2979 }
2980}
2981
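// Exercises a custom VmaPool filled with equally sized buffers: filling the pool to
// capacity, lost allocations across frame index changes, vmaDefragment,
// vmaMakePoolAllocationsLost, and an allocation larger than the pool's block size.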
Adam Sawickib8333fb2018-03-13 16:15:53 +01002982static void TestPool_SameSize()
2983{
2984 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2985 const size_t BUF_COUNT = 100;
2986 VkResult res;
2987
2988 RandomNumberGenerator rand{123};
2989
2990 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2991 bufferInfo.size = BUF_SIZE;
2992 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2993
2994 uint32_t memoryTypeBits = UINT32_MAX;
2995 {
2996 VkBuffer dummyBuffer;
2997 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002998 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002999
3000 VkMemoryRequirements memReq;
3001 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3002 memoryTypeBits = memReq.memoryTypeBits;
3003
3004 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3005 }
3006
3007 VmaAllocationCreateInfo poolAllocInfo = {};
3008 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3009 uint32_t memTypeIndex;
3010 res = vmaFindMemoryTypeIndex(
3011 g_hAllocator,
3012 memoryTypeBits,
3013 &poolAllocInfo,
3014 &memTypeIndex);
3015
3016 VmaPoolCreateInfo poolCreateInfo = {};
3017 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3018 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3019 poolCreateInfo.minBlockCount = 1;
3020 poolCreateInfo.maxBlockCount = 4;
3021 poolCreateInfo.frameInUseCount = 0;
3022
3023 VmaPool pool;
3024 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003025 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003026
3027 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3028
3029 VmaAllocationCreateInfo allocInfo = {};
3030 allocInfo.pool = pool;
3031 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3032 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
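    // With these flags every allocation from this pool can become lost, and creating a new
    // allocation may make other unused allocations lost to free up space - the behavior the
    // frame-index-based checks below rely on.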
3033
3034 struct BufItem
3035 {
3036 VkBuffer Buf;
3037 VmaAllocation Alloc;
3038 };
3039 std::vector<BufItem> items;
3040
3041 // Fill entire pool.
3042 for(size_t i = 0; i < BUF_COUNT; ++i)
3043 {
3044 BufItem item;
3045 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003046 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003047 items.push_back(item);
3048 }
3049
3050 // Make sure that another allocation would fail.
3051 {
3052 BufItem item;
3053 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003054 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003055 }
3056
3057 // Validate that no buffer is lost. Also check that they are not mapped.
3058 for(size_t i = 0; i < items.size(); ++i)
3059 {
3060 VmaAllocationInfo allocInfo;
3061 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003062 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3063 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003064 }
3065
3066 // Free some percent of random items.
3067 {
3068 const size_t PERCENT_TO_FREE = 10;
3069 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3070 for(size_t i = 0; i < itemsToFree; ++i)
3071 {
3072 size_t index = (size_t)rand.Generate() % items.size();
3073 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3074 items.erase(items.begin() + index);
3075 }
3076 }
3077
3078 // Randomly allocate and free items.
3079 {
3080 const size_t OPERATION_COUNT = BUF_COUNT;
3081 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3082 {
3083 bool allocate = rand.Generate() % 2 != 0;
3084 if(allocate)
3085 {
3086 if(items.size() < BUF_COUNT)
3087 {
3088 BufItem item;
3089 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003090 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003091 items.push_back(item);
3092 }
3093 }
3094 else // Free
3095 {
3096 if(!items.empty())
3097 {
3098 size_t index = (size_t)rand.Generate() % items.size();
3099 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3100 items.erase(items.begin() + index);
3101 }
3102 }
3103 }
3104 }
3105
3106 // Allocate up to maximum.
3107 while(items.size() < BUF_COUNT)
3108 {
3109 BufItem item;
3110 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003111 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003112 items.push_back(item);
3113 }
3114
3115 // Validate that no buffer is lost.
3116 for(size_t i = 0; i < items.size(); ++i)
3117 {
3118 VmaAllocationInfo allocInfo;
3119 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003120 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003121 }
3122
3123 // Next frame.
3124 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3125
3126 // Allocate another BUF_COUNT buffers.
3127 for(size_t i = 0; i < BUF_COUNT; ++i)
3128 {
3129 BufItem item;
3130 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003131 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003132 items.push_back(item);
3133 }
3134
3135 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3136 for(size_t i = 0; i < BUF_COUNT; ++i)
3137 {
3138 VmaAllocationInfo allocInfo;
3139 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003141 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3142 }
3143 items.erase(items.begin(), items.begin() + BUF_COUNT);
3144
3145 // Validate that no buffer is lost.
3146 for(size_t i = 0; i < items.size(); ++i)
3147 {
3148 VmaAllocationInfo allocInfo;
3149 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003150 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003151 }
3152
3153 // Free one item.
3154 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3155 items.pop_back();
3156
3157 // Validate statistics.
3158 {
3159 VmaPoolStats poolStats = {};
3160 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003161 TEST(poolStats.allocationCount == items.size());
3162 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3163 TEST(poolStats.unusedRangeCount == 1);
3164 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3165 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003166 }
3167
3168 // Free all remaining items.
3169 for(size_t i = items.size(); i--; )
3170 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3171 items.clear();
3172
3173 // Allocate maximum items again.
3174 for(size_t i = 0; i < BUF_COUNT; ++i)
3175 {
3176 BufItem item;
3177 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003178 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003179 items.push_back(item);
3180 }
3181
3182 // Delete every other item. (erase() shifts the remaining elements, so advancing i skips one each iteration.)
3183 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3184 {
3185 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3186 items.erase(items.begin() + i);
3187 }
3188
3189 // Defragment!
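    // 50 of the 100 buffers remain, spread over 4 blocks of 25-buffer capacity each,
    // so compacting them into 2 full blocks is expected to free 2 device memory blocks.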
3190 {
3191 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3192 for(size_t i = 0; i < items.size(); ++i)
3193 allocationsToDefragment[i] = items[i].Alloc;
3194
3195 VmaDefragmentationStats defragmentationStats;
3196 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003197 TEST(res == VK_SUCCESS);
3198 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003199 }
3200
3201 // Free all remaining items.
3202 for(size_t i = items.size(); i--; )
3203 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3204 items.clear();
3205
3206 ////////////////////////////////////////////////////////////////////////////////
3207 // Test for vmaMakePoolAllocationsLost
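    // vmaMakePoolAllocationsLost marks as lost every allocation in the pool that has not been
    // used for more than frameInUseCount frames (0 for this pool), so only allocations not
    // touched in the current frame are affected.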
3208
3209 // Allocate 4 buffers on frame 10.
3210 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3211 for(size_t i = 0; i < 4; ++i)
3212 {
3213 BufItem item;
3214 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003215 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003216 items.push_back(item);
3217 }
3218
3219 // Touch first 2 of them on frame 11.
3220 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3221 for(size_t i = 0; i < 2; ++i)
3222 {
3223 VmaAllocationInfo allocInfo;
3224 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3225 }
3226
3227 // vmaMakePoolAllocationsLost. Only the remaining 2 should be lost.
3228 size_t lostCount = 0xDEADC0DE;
3229 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003230 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003231
3232 // Make another call. Now 0 should be lost.
3233 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003234 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003235
3236 // Make another call, with null count. Should not crash.
3237 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3238
3239 // END: Free all remaining items.
3240 for(size_t i = items.size(); i--; )
3241 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3242
3243 items.clear();
3244
Adam Sawickid2924172018-06-11 12:48:46 +02003245 ////////////////////////////////////////////////////////////////////////////////
3246 // Test for allocation too large for pool
3247
3248 {
3249 VmaAllocationCreateInfo allocCreateInfo = {};
3250 allocCreateInfo.pool = pool;
3251
3252 VkMemoryRequirements memReq;
3253 memReq.memoryTypeBits = UINT32_MAX;
3254 memReq.alignment = 1;
3255 memReq.size = poolCreateInfo.blockSize + 4;
3256
3257 VmaAllocation alloc = nullptr;
3258 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003259 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003260 }
3261
Adam Sawickib8333fb2018-03-13 16:15:53 +01003262 vmaDestroyPool(g_hAllocator, pool);
3263}
3264
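// As the cases below demonstrate, vmaResizeAllocation can shrink an allocation freely but can
// grow it only into free space that directly follows it inside the same memory block;
// dedicated allocations cannot be resized at all (except to their current size).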
Adam Sawickib0c36362018-11-13 16:17:38 +01003265static void TestResize()
3266{
3267 wprintf(L"Testing vmaResizeAllocation...\n");
3268
3269 const VkDeviceSize KILOBYTE = 1024ull;
3270 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3271
3272 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3273 bufCreateInfo.size = 2 * MEGABYTE;
3274 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3275
3276 VmaAllocationCreateInfo allocCreateInfo = {};
3277 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3278
3279 uint32_t memTypeIndex = UINT32_MAX;
3280 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3281
3282 VmaPoolCreateInfo poolCreateInfo = {};
3283 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3284 poolCreateInfo.blockSize = 8 * MEGABYTE;
3285 poolCreateInfo.minBlockCount = 1;
3286 poolCreateInfo.maxBlockCount = 1;
3287 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3288
3289 VmaPool pool;
3290 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3291
3292 allocCreateInfo.pool = pool;
3293
3294 // Fill 8 MB pool with 4 * 2 MB allocations.
3295 VmaAllocation allocs[4] = {};
3296
3297 VkMemoryRequirements memReq = {};
3298 memReq.memoryTypeBits = UINT32_MAX;
3299 memReq.alignment = 4;
3300 memReq.size = bufCreateInfo.size;
3301
3302 VmaAllocationInfo allocInfo = {};
3303
3304 for(uint32_t i = 0; i < 4; ++i)
3305 {
3306 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3307 }
3308
3309 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3310
3311 // Case: Resize to the same size always succeeds.
3312 {
3313 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3314 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3315 TEST(allocInfo.size == 2ull * 1024 * 1024);
3316 }
3317
3318 // Case: Shrink allocation at the end.
3319 {
3320 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3321 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3322 TEST(allocInfo.size == 1ull * 1024 * 1024);
3323 }
3324
3325 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3326
3327 // Case: Shrink allocation before free space.
3328 {
3329 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3330 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3331 TEST(allocInfo.size == 512 * KILOBYTE);
3332 }
3333
3334 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3335
3336 // Case: Shrink allocation before next allocation.
3337 {
3338 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3339 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3340 TEST(allocInfo.size == 1 * MEGABYTE);
3341 }
3342
3343 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3344
3345 // Case: Grow allocation while there is even more space available.
3346 {
3347 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3348 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3349 TEST(allocInfo.size == 1 * MEGABYTE);
3350 }
3351
3352 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3353
3354 // Case: Grow allocation while there is exact amount of free space available.
3355 {
3356 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3357 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3358 TEST(allocInfo.size == 2 * MEGABYTE);
3359 }
3360
3361 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3362
3363 // Case: Fail to grow when there is not enough free space due to next allocation.
3364 {
3365 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3366 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3367 TEST(allocInfo.size == 2 * MEGABYTE);
3368 }
3369
3370 // Case: Fail to grow when there is not enough free space due to end of memory block.
3371 {
3372 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3373 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3374 TEST(allocInfo.size == 1 * MEGABYTE);
3375 }
3376
3377 for(uint32_t i = 4; i--; )
3378 {
3379 vmaFreeMemory(g_hAllocator, allocs[i]);
3380 }
3381
3382 vmaDestroyPool(g_hAllocator, pool);
3383
3384 // Test dedicated allocation
3385 {
3386 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3387 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3388 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3389
3390 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3391 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3392
3393 // Case: Resize to the same size always succeeds.
3394 {
3395 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3396 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3397 TEST(allocInfo.size == 2ull * 1024 * 1024);
3398 }
3399
3400 // Case: Shrinking fails.
3401 {
3402 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3403 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3404 TEST(allocInfo.size == 2ull * 1024 * 1024);
3405 }
3406
3407 // Case: Growing fails.
3408 {
3409 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3410 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3411 TEST(allocInfo.size == 2ull * 1024 * 1024);
3412 }
3413
3414 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3415 }
3416}
3417
Adam Sawickie44c6262018-06-15 14:30:39 +02003418static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3419{
3420 const uint8_t* pBytes = (const uint8_t*)pMemory;
3421 for(size_t i = 0; i < size; ++i)
3422 {
3423 if(pBytes[i] != pattern)
3424 {
3425 return false;
3426 }
3427 }
3428 return true;
3429}
3430
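// Checks the debug feature that fills allocations with a byte pattern. It assumes the library
// is compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled, so newly created allocations are
// filled with 0xDC and freed ones with 0xEF - the patterns validated below.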
3431static void TestAllocationsInitialization()
3432{
3433 VkResult res;
3434
3435 const size_t BUF_SIZE = 1024;
3436
3437 // Create pool.
3438
3439 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3440 bufInfo.size = BUF_SIZE;
3441 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3442
3443 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3444 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3445
3446 VmaPoolCreateInfo poolCreateInfo = {};
3447 poolCreateInfo.blockSize = BUF_SIZE * 10;
3448 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3449 poolCreateInfo.maxBlockCount = 1;
3450 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003451 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003452
3453 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3454 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003455 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003456
3457 // Create one persistently mapped buffer to keep memory of this block mapped,
3458 // so that the pointer to mapped data remains (more or less...) valid even
3459 // after destruction of other allocations.
3460
3461 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3462 VkBuffer firstBuf;
3463 VmaAllocation firstAlloc;
3464 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003465 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003466
3467 // Test buffers.
3468
3469 for(uint32_t i = 0; i < 2; ++i)
3470 {
3471 const bool persistentlyMapped = i == 0;
3472 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3473 VkBuffer buf;
3474 VmaAllocation alloc;
3475 VmaAllocationInfo allocInfo;
3476 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003477 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003478
3479 void* pMappedData;
3480 if(!persistentlyMapped)
3481 {
3482 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003483 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003484 }
3485 else
3486 {
3487 pMappedData = allocInfo.pMappedData;
3488 }
3489
3490 // Validate initialized content
3491 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003492 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003493
3494 if(!persistentlyMapped)
3495 {
3496 vmaUnmapMemory(g_hAllocator, alloc);
3497 }
3498
3499 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3500
3501 // Validate freed content
3502 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003503 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003504 }
3505
3506 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3507 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3508}
3509
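// Multi-threaded, frame-based benchmark of a single custom pool: worker threads allocate and
// free buffers/images of randomized sizes each frame (synchronized with Win32 events), while
// allocation/deallocation times, failed and lost allocations are accumulated per thread and
// then merged into outResult.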
Adam Sawickib8333fb2018-03-13 16:15:53 +01003510static void TestPool_Benchmark(
3511 PoolTestResult& outResult,
3512 const PoolTestConfig& config)
3513{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003514 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003515
3516 RandomNumberGenerator mainRand{config.RandSeed};
3517
3518 uint32_t allocationSizeProbabilitySum = std::accumulate(
3519 config.AllocationSizes.begin(),
3520 config.AllocationSizes.end(),
3521 0u,
3522 [](uint32_t sum, const AllocationSize& allocSize) {
3523 return sum + allocSize.Probability;
3524 });
3525
3526 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3527 bufferInfo.size = 256; // Whatever.
3528 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3529
3530 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3531 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3532 imageInfo.extent.width = 256; // Whatever.
3533 imageInfo.extent.height = 256; // Whatever.
3534 imageInfo.extent.depth = 1;
3535 imageInfo.mipLevels = 1;
3536 imageInfo.arrayLayers = 1;
3537 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3538 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3539 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3540 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3541 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3542
3543 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3544 {
3545 VkBuffer dummyBuffer;
3546 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003547 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003548
3549 VkMemoryRequirements memReq;
3550 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3551 bufferMemoryTypeBits = memReq.memoryTypeBits;
3552
3553 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3554 }
3555
3556 uint32_t imageMemoryTypeBits = UINT32_MAX;
3557 {
3558 VkImage dummyImage;
3559 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003560 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003561
3562 VkMemoryRequirements memReq;
3563 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3564 imageMemoryTypeBits = memReq.memoryTypeBits;
3565
3566 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3567 }
3568
3569 uint32_t memoryTypeBits = 0;
3570 if(config.UsesBuffers() && config.UsesImages())
3571 {
3572 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3573 if(memoryTypeBits == 0)
3574 {
3575 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3576 return;
3577 }
3578 }
3579 else if(config.UsesBuffers())
3580 memoryTypeBits = bufferMemoryTypeBits;
3581 else if(config.UsesImages())
3582 memoryTypeBits = imageMemoryTypeBits;
3583 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003584 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003585
3586 VmaPoolCreateInfo poolCreateInfo = {};
3587 poolCreateInfo.memoryTypeIndex = 0;
3588 poolCreateInfo.minBlockCount = 1;
3589 poolCreateInfo.maxBlockCount = 1;
3590 poolCreateInfo.blockSize = config.PoolSize;
3591 poolCreateInfo.frameInUseCount = 1;
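    // frameInUseCount = 1: an allocation that can become lost stays protected for the frame it
    // was last used in plus one more frame before it may be taken over by new allocations.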
3592
3593 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3594 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3595 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3596
3597 VmaPool pool;
3598 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003599 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003600
3601 // Start time measurement - after creating pool and initializing data structures.
3602 time_point timeBeg = std::chrono::high_resolution_clock::now();
3603
3604 ////////////////////////////////////////////////////////////////////////////////
3605 // ThreadProc
3606 auto ThreadProc = [&](
3607 PoolTestThreadResult* outThreadResult,
3608 uint32_t randSeed,
3609 HANDLE frameStartEvent,
3610 HANDLE frameEndEvent) -> void
3611 {
3612 RandomNumberGenerator threadRand{randSeed};
3613
3614 outThreadResult->AllocationTimeMin = duration::max();
3615 outThreadResult->AllocationTimeSum = duration::zero();
3616 outThreadResult->AllocationTimeMax = duration::min();
3617 outThreadResult->DeallocationTimeMin = duration::max();
3618 outThreadResult->DeallocationTimeSum = duration::zero();
3619 outThreadResult->DeallocationTimeMax = duration::min();
3620 outThreadResult->AllocationCount = 0;
3621 outThreadResult->DeallocationCount = 0;
3622 outThreadResult->LostAllocationCount = 0;
3623 outThreadResult->LostAllocationTotalSize = 0;
3624 outThreadResult->FailedAllocationCount = 0;
3625 outThreadResult->FailedAllocationTotalSize = 0;
3626
3627 struct Item
3628 {
3629 VkDeviceSize BufferSize;
3630 VkExtent2D ImageSize;
3631 VkBuffer Buf;
3632 VkImage Image;
3633 VmaAllocation Alloc;
3634
3635 VkDeviceSize CalcSizeBytes() const
3636 {
3637 return BufferSize +
3638 ImageSize.width * ImageSize.height * 4;
3639 }
3640 };
3641 std::vector<Item> unusedItems, usedItems;
3642
3643 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3644
3645 // Create all items - all unused, not yet allocated.
3646 for(size_t i = 0; i < threadTotalItemCount; ++i)
3647 {
3648 Item item = {};
3649
3650 uint32_t allocSizeIndex = 0;
3651 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3652 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3653 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3654
3655 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3656 if(allocSize.BufferSizeMax > 0)
3657 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003658 TEST(allocSize.BufferSizeMin > 0);
3659 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003660 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3661 item.BufferSize = allocSize.BufferSizeMin;
3662 else
3663 {
3664 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3665 item.BufferSize = item.BufferSize / 16 * 16;
3666 }
3667 }
3668 else
3669 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003670 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003671 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3672 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3673 else
3674 {
3675 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3676 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3677 }
3678 }
3679
3680 unusedItems.push_back(item);
3681 }
3682
3683 auto Allocate = [&](Item& item) -> VkResult
3684 {
3685 VmaAllocationCreateInfo allocCreateInfo = {};
3686 allocCreateInfo.pool = pool;
3687 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3688 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3689
3690 if(item.BufferSize)
3691 {
3692 bufferInfo.size = item.BufferSize;
3693 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3694 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3695 }
3696 else
3697 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003698 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003699
3700 imageInfo.extent.width = item.ImageSize.width;
3701 imageInfo.extent.height = item.ImageSize.height;
3702 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3703 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3704 }
3705 };
3706
3707 ////////////////////////////////////////////////////////////////////////////////
3708 // Frames
3709 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3710 {
3711 WaitForSingleObject(frameStartEvent, INFINITE);
3712
3713 // Always make some percent of used bufs unused, to choose different used ones.
3714 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3715 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3716 {
3717 size_t index = threadRand.Generate() % usedItems.size();
3718 unusedItems.push_back(usedItems[index]);
3719 usedItems.erase(usedItems.begin() + index);
3720 }
3721
3722 // Determine which bufs we want to use in this frame.
3723 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3724 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003725 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003726 // Move some used to unused.
3727 while(usedBufCount < usedItems.size())
3728 {
3729 size_t index = threadRand.Generate() % usedItems.size();
3730 unusedItems.push_back(usedItems[index]);
3731 usedItems.erase(usedItems.begin() + index);
3732 }
3733 // Move some unused to used.
3734 while(usedBufCount > usedItems.size())
3735 {
3736 size_t index = threadRand.Generate() % unusedItems.size();
3737 usedItems.push_back(unusedItems[index]);
3738 unusedItems.erase(unusedItems.begin() + index);
3739 }
3740
3741 uint32_t touchExistingCount = 0;
3742 uint32_t touchLostCount = 0;
3743 uint32_t createSucceededCount = 0;
3744 uint32_t createFailedCount = 0;
3745
3746 // Touch all used bufs. If not created or lost, allocate.
3747 for(size_t i = 0; i < usedItems.size(); ++i)
3748 {
3749 Item& item = usedItems[i];
3750 // Not yet created.
3751 if(item.Alloc == VK_NULL_HANDLE)
3752 {
3753 res = Allocate(item);
3754 ++outThreadResult->AllocationCount;
3755 if(res != VK_SUCCESS)
3756 {
3757 item.Alloc = VK_NULL_HANDLE;
3758 item.Buf = VK_NULL_HANDLE;
3759 ++outThreadResult->FailedAllocationCount;
3760 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3761 ++createFailedCount;
3762 }
3763 else
3764 ++createSucceededCount;
3765 }
3766 else
3767 {
3768 // Touch.
3769 VmaAllocationInfo allocInfo;
3770 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3771 // Lost.
3772 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3773 {
3774 ++touchLostCount;
3775
3776 // Destroy.
3777 {
3778 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3779 if(item.Buf)
3780 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3781 else
3782 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3783 ++outThreadResult->DeallocationCount;
3784 }
3785 item.Alloc = VK_NULL_HANDLE;
3786 item.Buf = VK_NULL_HANDLE;
3787
3788 ++outThreadResult->LostAllocationCount;
3789 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3790
3791 // Recreate.
3792 res = Allocate(item);
3793 ++outThreadResult->AllocationCount;
3794 // Creation failed.
3795 if(res != VK_SUCCESS)
3796 {
3797 ++outThreadResult->FailedAllocationCount;
3798 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3799 ++createFailedCount;
3800 }
3801 else
3802 ++createSucceededCount;
3803 }
3804 else
3805 ++touchExistingCount;
3806 }
3807 }
3808
3809 /*
3810 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3811 randSeed, frameIndex,
3812 touchExistingCount, touchLostCount,
3813 createSucceededCount, createFailedCount);
3814 */
3815
3816 SetEvent(frameEndEvent);
3817 }
3818
3819 // Free all remaining items.
3820 for(size_t i = usedItems.size(); i--; )
3821 {
3822 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3823 if(usedItems[i].Buf)
3824 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3825 else
3826 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3827 ++outThreadResult->DeallocationCount;
3828 }
3829 for(size_t i = unusedItems.size(); i--; )
3830 {
3831 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3832 if(unusedItems[i].Buf)
3833 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3834 else
3835 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3836 ++outThreadResult->DeallocationCount;
3837 }
3838 };
3839
3840 // Launch threads.
3841 uint32_t threadRandSeed = mainRand.Generate();
3842 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3843 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3844 std::vector<std::thread> bkgThreads;
3845 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3846 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3847 {
3848 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3849 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3850 bkgThreads.emplace_back(std::bind(
3851 ThreadProc,
3852 &threadResults[threadIndex],
3853 threadRandSeed + threadIndex,
3854 frameStartEvents[threadIndex],
3855 frameEndEvents[threadIndex]));
3856 }
3857
3858 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003859 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003860 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3861 {
3862 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3863 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3864 SetEvent(frameStartEvents[threadIndex]);
3865 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3866 }
3867
3868 // Wait for threads to finish.
3869 for(size_t i = 0; i < bkgThreads.size(); ++i)
3870 {
3871 bkgThreads[i].join();
3872 CloseHandle(frameEndEvents[i]);
3873 CloseHandle(frameStartEvents[i]);
3874 }
3875 bkgThreads.clear();
3876
3877 // Finish time measurement - before destroying pool.
3878 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3879
3880 vmaDestroyPool(g_hAllocator, pool);
3881
3882 outResult.AllocationTimeMin = duration::max();
3883 outResult.AllocationTimeAvg = duration::zero();
3884 outResult.AllocationTimeMax = duration::min();
3885 outResult.DeallocationTimeMin = duration::max();
3886 outResult.DeallocationTimeAvg = duration::zero();
3887 outResult.DeallocationTimeMax = duration::min();
3888 outResult.LostAllocationCount = 0;
3889 outResult.LostAllocationTotalSize = 0;
3890 outResult.FailedAllocationCount = 0;
3891 outResult.FailedAllocationTotalSize = 0;
3892 size_t allocationCount = 0;
3893 size_t deallocationCount = 0;
3894 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3895 {
3896 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3897 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3898 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3899 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3900 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3901 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3902 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3903 allocationCount += threadResult.AllocationCount;
3904 deallocationCount += threadResult.DeallocationCount;
3905 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3906 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3907 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3908 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3909 }
3910 if(allocationCount)
3911 outResult.AllocationTimeAvg /= allocationCount;
3912 if(deallocationCount)
3913 outResult.DeallocationTimeAvg /= deallocationCount;
3914}
3915
3916static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3917{
3918 if(ptr1 < ptr2)
3919 return ptr1 + size1 > ptr2;
3920 else if(ptr2 < ptr1)
3921 return ptr2 + size2 > ptr1;
3922 else
3923 return true;
3924}
3925
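// Verifies vmaMapMemory/vmaUnmapMemory reference counting (mapping the same allocation again
// returns the same pointer and requires a matching number of unmaps) and persistent mapping
// via VMA_ALLOCATION_CREATE_MAPPED_BIT, for default, custom-pool and dedicated allocations.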
3926static void TestMapping()
3927{
3928 wprintf(L"Testing mapping...\n");
3929
3930 VkResult res;
3931 uint32_t memTypeIndex = UINT32_MAX;
3932
3933 enum TEST
3934 {
3935 TEST_NORMAL,
3936 TEST_POOL,
3937 TEST_DEDICATED,
3938 TEST_COUNT
3939 };
3940 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3941 {
3942 VmaPool pool = nullptr;
3943 if(testIndex == TEST_POOL)
3944 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003945 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003946 VmaPoolCreateInfo poolInfo = {};
3947 poolInfo.memoryTypeIndex = memTypeIndex;
3948 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003949 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003950 }
3951
3952 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3953 bufInfo.size = 0x10000;
3954 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3955
3956 VmaAllocationCreateInfo allocCreateInfo = {};
3957 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3958 allocCreateInfo.pool = pool;
3959 if(testIndex == TEST_DEDICATED)
3960 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3961
3962 VmaAllocationInfo allocInfo;
3963
3964 // Mapped manually
3965
3966 // Create 2 buffers.
3967 BufferInfo bufferInfos[3];
3968 for(size_t i = 0; i < 2; ++i)
3969 {
3970 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3971 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003972 TEST(res == VK_SUCCESS);
3973 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003974 memTypeIndex = allocInfo.memoryType;
3975 }
3976
3977 // Map buffer 0.
3978 char* data00 = nullptr;
3979 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003980 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003981 data00[0xFFFF] = data00[0];
3982
3983 // Map buffer 0 second time.
3984 char* data01 = nullptr;
3985 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003986 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003987
3988 // Map buffer 1.
3989 char* data1 = nullptr;
3990 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003991 TEST(res == VK_SUCCESS && data1 != nullptr);
3992 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003993 data1[0xFFFF] = data1[0];
3994
3995 // Unmap buffer 0 two times.
3996 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3997 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3998 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003999 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004000
4001 // Unmap buffer 1.
4002 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4003 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004004 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004005
4006 // Create 3rd buffer - persistently mapped.
4007 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4008 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4009 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004010 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004011
4012 // Map buffer 2.
4013 char* data2 = nullptr;
4014 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004015 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004016 data2[0xFFFF] = data2[0];
4017
4018 // Unmap buffer 2.
4019 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4020 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004021 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004022
4023 // Destroy all buffers.
4024 for(size_t i = 3; i--; )
4025 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4026
4027 vmaDestroyPool(g_hAllocator, pool);
4028 }
4029}
4030
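// Same mapping scenarios as above, but stressed concurrently: 16 threads each create
// bufferCount/threadCount buffers and map/unmap them in randomly chosen modes against the
// shared allocator (and, in one pass, a shared custom pool).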
4031static void TestMappingMultithreaded()
4032{
4033 wprintf(L"Testing mapping multithreaded...\n");
4034
4035 static const uint32_t threadCount = 16;
4036 static const uint32_t bufferCount = 1024;
4037 static const uint32_t threadBufferCount = bufferCount / threadCount;
4038
4039 VkResult res;
4040 volatile uint32_t memTypeIndex = UINT32_MAX;
4041
4042 enum TEST
4043 {
4044 TEST_NORMAL,
4045 TEST_POOL,
4046 TEST_DEDICATED,
4047 TEST_COUNT
4048 };
4049 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4050 {
4051 VmaPool pool = nullptr;
4052 if(testIndex == TEST_POOL)
4053 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004054 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004055 VmaPoolCreateInfo poolInfo = {};
4056 poolInfo.memoryTypeIndex = memTypeIndex;
4057 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004058 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004059 }
4060
4061 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4062 bufCreateInfo.size = 0x10000;
4063 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4064
4065 VmaAllocationCreateInfo allocCreateInfo = {};
4066 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4067 allocCreateInfo.pool = pool;
4068 if(testIndex == TEST_DEDICATED)
4069 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4070
4071 std::thread threads[threadCount];
4072 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4073 {
4074 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4075 // ======== THREAD FUNCTION ========
4076
4077 RandomNumberGenerator rand{threadIndex};
4078
4079 enum class MODE
4080 {
4081 // Don't map this buffer at all.
4082 DONT_MAP,
4083 // Map and quickly unmap.
4084 MAP_FOR_MOMENT,
4085 // Map and unmap before destruction.
4086 MAP_FOR_LONGER,
4087 // Map two times. Quickly unmap, second unmap before destruction.
4088 MAP_TWO_TIMES,
4089 // Create this buffer as persistently mapped.
4090 PERSISTENTLY_MAPPED,
4091 COUNT
4092 };
4093 std::vector<BufferInfo> bufInfos{threadBufferCount};
4094 std::vector<MODE> bufModes{threadBufferCount};
4095
4096 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4097 {
4098 BufferInfo& bufInfo = bufInfos[bufferIndex];
4099 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4100 bufModes[bufferIndex] = mode;
4101
4102 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4103 if(mode == MODE::PERSISTENTLY_MAPPED)
4104 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4105
4106 VmaAllocationInfo allocInfo;
4107 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4108 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004109 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004110
4111 if(memTypeIndex == UINT32_MAX)
4112 memTypeIndex = allocInfo.memoryType;
4113
4114 char* data = nullptr;
4115
4116 if(mode == MODE::PERSISTENTLY_MAPPED)
4117 {
4118 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004119 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004120 }
4121 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4122 mode == MODE::MAP_TWO_TIMES)
4123 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004124 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004125 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004126 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004127
4128 if(mode == MODE::MAP_TWO_TIMES)
4129 {
4130 char* data2 = nullptr;
4131 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004132 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004133 }
4134 }
4135 else if(mode == MODE::DONT_MAP)
4136 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004137 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004138 }
4139 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004140 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004141
4142 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4143 if(data)
4144 data[0xFFFF] = data[0];
4145
4146 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4147 {
4148 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4149
4150 VmaAllocationInfo allocInfo;
4151 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4152 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004153 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004154 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004155 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004156 }
4157
4158 switch(rand.Generate() % 3)
4159 {
4160 case 0: Sleep(0); break; // Yield.
4161 case 1: Sleep(10); break; // 10 ms
4162 // default: No sleep.
4163 }
4164
4165 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4166 if(data)
4167 data[0xFFFF] = data[0];
4168 }
4169
4170 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4171 {
4172 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4173 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4174 {
4175 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4176
4177 VmaAllocationInfo allocInfo;
4178 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004179 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004180 }
4181
4182 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4183 }
4184 });
4185 }
4186
4187 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4188 threads[threadIndex].join();
4189
4190 vmaDestroyPool(g_hAllocator, pool);
4191 }
4192}
4193
4194static void WriteMainTestResultHeader(FILE* file)
4195{
4196 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004197 "Code,Time,"
4198 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004199 "Total Time (us),"
4200 "Allocation Time Min (us),"
4201 "Allocation Time Avg (us),"
4202 "Allocation Time Max (us),"
4203 "Deallocation Time Min (us),"
4204 "Deallocation Time Avg (us),"
4205 "Deallocation Time Max (us),"
4206 "Total Memory Allocated (B),"
4207 "Free Range Size Avg (B),"
4208 "Free Range Size Max (B)\n");
4209}
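// Note: the column order above must stay in sync with the fprintf format string used in
// WriteMainTestResult below; testDescription itself contributes the comma-separated
// Threads / Buffers and images / Sizes / ... columns.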
4210
4211static void WriteMainTestResult(
4212 FILE* file,
4213 const char* codeDescription,
4214 const char* testDescription,
4215 const Config& config, const Result& result)
4216{
4217 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4218 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4219 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4220 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4221 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4222 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4223 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4224
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004225 std::string currTime;
4226 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004227
4228 fprintf(file,
4229 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004230 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4231 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004232 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004233 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004234 totalTimeSeconds * 1e6f,
4235 allocationTimeMinSeconds * 1e6f,
4236 allocationTimeAvgSeconds * 1e6f,
4237 allocationTimeMaxSeconds * 1e6f,
4238 deallocationTimeMinSeconds * 1e6f,
4239 deallocationTimeAvgSeconds * 1e6f,
4240 deallocationTimeMaxSeconds * 1e6f,
4241 result.TotalMemoryAllocated,
4242 result.FreeRangeSizeAvg,
4243 result.FreeRangeSizeMax);
4244}
4245
4246static void WritePoolTestResultHeader(FILE* file)
4247{
4248 fprintf(file,
4249 "Code,Test,Time,"
4250 "Config,"
4251 "Total Time (us),"
4252 "Allocation Time Min (us),"
4253 "Allocation Time Avg (us),"
4254 "Allocation Time Max (us),"
4255 "Deallocation Time Min (us),"
4256 "Deallocation Time Avg (us),"
4257 "Deallocation Time Max (us),"
4258 "Lost Allocation Count,"
4259 "Lost Allocation Total Size (B),"
4260 "Failed Allocation Count,"
4261 "Failed Allocation Total Size (B)\n");
4262}
4263
4264static void WritePoolTestResult(
4265 FILE* file,
4266 const char* codeDescription,
4267 const char* testDescription,
4268 const PoolTestConfig& config,
4269 const PoolTestResult& result)
4270{
4271 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4272 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4273 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4274 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4275 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4276 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4277 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4278
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004279 std::string currTime;
4280 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004281
4282 fprintf(file,
4283 "%s,%s,%s,"
4284 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4285 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4286 // General
4287 codeDescription,
4288 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004289 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004290 // Config
4291 config.ThreadCount,
4292 (unsigned long long)config.PoolSize,
4293 config.FrameCount,
4294 config.TotalItemCount,
4295 config.UsedItemCountMin,
4296 config.UsedItemCountMax,
4297 config.ItemsToMakeUnusedPercent,
4298 // Results
4299 totalTimeSeconds * 1e6f,
4300 allocationTimeMinSeconds * 1e6f,
4301 allocationTimeAvgSeconds * 1e6f,
4302 allocationTimeMaxSeconds * 1e6f,
4303 deallocationTimeMinSeconds * 1e6f,
4304 deallocationTimeAvgSeconds * 1e6f,
4305 deallocationTimeMaxSeconds * 1e6f,
4306 result.LostAllocationCount,
4307 result.LostAllocationTotalSize,
4308 result.FailedAllocationCount,
4309 result.FailedAllocationTotalSize);
4310}
4311
4312static void PerformCustomMainTest(FILE* file)
4313{
4314 Config config{};
4315 config.RandSeed = 65735476;
4316 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4317 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4318 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4319 config.FreeOrder = FREE_ORDER::FORWARD;
4320 config.ThreadCount = 16;
4321 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004322 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004323
4324 // Buffers
4325 //config.AllocationSizes.push_back({4, 16, 1024});
4326 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4327
4328 // Images
4329 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4330 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4331
4332 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4333 config.AdditionalOperationCount = 1024;
4334
4335 Result result{};
4336 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004337 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004338 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4339}
4340
4341static void PerformCustomPoolTest(FILE* file)
4342{
4343 PoolTestConfig config;
4344 config.PoolSize = 100 * 1024 * 1024;
4345 config.RandSeed = 2345764;
4346 config.ThreadCount = 1;
4347 config.FrameCount = 200;
4348 config.ItemsToMakeUnusedPercent = 2;
4349
4350 AllocationSize allocSize = {};
4351 allocSize.BufferSizeMin = 1024;
4352 allocSize.BufferSizeMax = 1024 * 1024;
4353 allocSize.Probability = 1;
4354 config.AllocationSizes.push_back(allocSize);
4355
4356 allocSize.BufferSizeMin = 0;
4357 allocSize.BufferSizeMax = 0;
4358 allocSize.ImageSizeMin = 128;
4359 allocSize.ImageSizeMax = 1024;
4360 allocSize.Probability = 1;
4361 config.AllocationSizes.push_back(allocSize);
4362
4363 config.PoolSize = config.CalcAvgResourceSize() * 200;
4364 config.UsedItemCountMax = 160;
4365 config.TotalItemCount = config.UsedItemCountMax * 10;
4366 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4367
4368 g_MemoryAliasingWarningEnabled = false;
4369 PoolTestResult result = {};
4370 TestPool_Benchmark(result, config);
4371 g_MemoryAliasingWarningEnabled = true;
4372
4373 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4374}
4375
Adam Sawickib8333fb2018-03-13 16:15:53 +01004376static void PerformMainTests(FILE* file)
4377{
4378 uint32_t repeatCount = 1;
4379 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4380
4381 Config config{};
4382 config.RandSeed = 65735476;
4383 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4384 config.FreeOrder = FREE_ORDER::FORWARD;
4385
4386 size_t threadCountCount = 1;
4387 switch(ConfigType)
4388 {
4389 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4390 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4391 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4392 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4393 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4394 default: assert(0);
4395 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004396
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004397 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004398
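    // The nested loops below build up the comma-separated test description matching the CSV
    // header (thread count / common-allocation percentage, buffers vs. images, small vs. large,
    // varying vs. constant sizes); allocation strategy and free order - see strategyCount above
    // and the header columns - cover the remaining dimensions of the benchmark.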
Adam Sawickib8333fb2018-03-13 16:15:53 +01004399 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4400 {
4401 std::string desc1;
4402
4403 switch(threadCountIndex)
4404 {
4405 case 0:
4406 desc1 += "1_thread";
4407 config.ThreadCount = 1;
4408 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4409 break;
4410 case 1:
4411 desc1 += "16_threads+0%_common";
4412 config.ThreadCount = 16;
4413 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4414 break;
4415 case 2:
4416 desc1 += "16_threads+50%_common";
4417 config.ThreadCount = 16;
4418 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4419 break;
4420 case 3:
4421 desc1 += "16_threads+100%_common";
4422 config.ThreadCount = 16;
4423 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4424 break;
4425 case 4:
4426 desc1 += "2_threads+0%_common";
4427 config.ThreadCount = 2;
4428 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4429 break;
4430 case 5:
4431 desc1 += "2_threads+50%_common";
4432 config.ThreadCount = 2;
4433 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4434 break;
4435 case 6:
4436 desc1 += "2_threads+100%_common";
4437 config.ThreadCount = 2;
4438 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4439 break;
4440 default:
4441 assert(0);
4442 }
4443
4444 // 0 = buffers, 1 = images, 2 = buffers and images
4445 size_t buffersVsImagesCount = 2;
4446 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4447 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4448 {
4449 std::string desc2 = desc1;
4450 switch(buffersVsImagesIndex)
4451 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004452 case 0: desc2 += ",Buffers"; break;
4453 case 1: desc2 += ",Images"; break;
4454 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004455 default: assert(0);
4456 }
4457
4458 // 0 = small, 1 = large, 2 = small and large
4459 size_t smallVsLargeCount = 2;
4460 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4461 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4462 {
4463 std::string desc3 = desc2;
4464 switch(smallVsLargeIndex)
4465 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004466 case 0: desc3 += ",Small"; break;
4467 case 1: desc3 += ",Large"; break;
4468 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004469 default: assert(0);
4470 }
4471
4472 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4473 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4474 else
4475                    config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4476
4477 // 0 = varying sizes min...max, 1 = set of constant sizes
4478 size_t constantSizesCount = 1;
4479 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4480 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4481 {
4482 std::string desc4 = desc3;
4483 switch(constantSizesIndex)
4484 {
4485 case 0: desc4 += " Varying_sizes"; break;
4486 case 1: desc4 += " Constant_sizes"; break;
4487 default: assert(0);
4488 }
4489
4490 config.AllocationSizes.clear();
4491 // Buffers present
4492 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4493 {
4494 // Small
4495 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4496 {
4497 // Varying size
4498 if(constantSizesIndex == 0)
4499 config.AllocationSizes.push_back({4, 16, 1024});
4500 // Constant sizes
4501 else
4502 {
4503 config.AllocationSizes.push_back({1, 16, 16});
4504 config.AllocationSizes.push_back({1, 64, 64});
4505 config.AllocationSizes.push_back({1, 256, 256});
4506 config.AllocationSizes.push_back({1, 1024, 1024});
4507 }
4508 }
4509 // Large
4510 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4511 {
4512 // Varying size
4513 if(constantSizesIndex == 0)
4514 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4515 // Constant sizes
4516 else
4517 {
4518 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4519 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4520 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4521 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4522 }
4523 }
4524 }
4525 // Images present
4526 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4527 {
4528 // Small
4529 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4530 {
4531 // Varying size
4532 if(constantSizesIndex == 0)
4533 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4534 // Constant sizes
4535 else
4536 {
4537 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4538 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4539 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4540 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4541 }
4542 }
4543 // Large
4544 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4545 {
4546 // Varying size
4547 if(constantSizesIndex == 0)
4548 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4549 // Constant sizes
4550 else
4551 {
4552 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4553 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4554 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4555 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4556 }
4557 }
4558 }
4559
4560                    // Initial allocation as a fraction of MaxBytesToAllocate: 0 = 100% (no additional operations), 1 = 50%, 2 = 5%, 3 = 95% (each followed by many additional operations).
4561 size_t beginBytesToAllocateCount = 1;
4562 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4563 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4564 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4565 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4566 {
4567 std::string desc5 = desc4;
4568
4569 switch(beginBytesToAllocateIndex)
4570 {
4571 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004572 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004573 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4574 config.AdditionalOperationCount = 0;
4575 break;
4576 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004577 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004578 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4579 config.AdditionalOperationCount = 1024;
4580 break;
4581 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004582 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004583 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4584 config.AdditionalOperationCount = 1024;
4585 break;
4586 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004587 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004588 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4589 config.AdditionalOperationCount = 1024;
4590 break;
4591 default:
4592 assert(0);
4593 }
4594
Adam Sawicki0667e332018-08-24 17:26:44 +02004595 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004596 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004597 std::string desc6 = desc5;
4598 switch(strategyIndex)
4599 {
4600 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004601 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004602 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4603 break;
4604 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004605 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004606 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4607 break;
4608 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004609 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004610 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4611 break;
4612 default:
4613 assert(0);
4614 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004615
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004616 desc6 += ',';
4617 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004618
4619 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004620
4621 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4622 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004623 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004624
4625 Result result{};
4626 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004627 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004628 if(file)
4629 {
4630 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4631 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004632 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004633 }
4634 }
4635 }
4636 }
4637 }
4638 }
4639}
4640
4641static void PerformPoolTests(FILE* file)
4642{
4643 const size_t AVG_RESOURCES_PER_POOL = 300;
4644
4645 uint32_t repeatCount = 1;
4646 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4647
4648 PoolTestConfig config{};
4649 config.RandSeed = 2346343;
4650 config.FrameCount = 200;
4651 config.ItemsToMakeUnusedPercent = 2;
4652
4653 size_t threadCountCount = 1;
4654 switch(ConfigType)
4655 {
4656 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4657 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4658 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4659 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4660 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4661 default: assert(0);
4662 }
4663 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4664 {
4665 std::string desc1;
4666
4667 switch(threadCountIndex)
4668 {
4669 case 0:
4670 desc1 += "1_thread";
4671 config.ThreadCount = 1;
4672 break;
4673 case 1:
4674 desc1 += "16_threads";
4675 config.ThreadCount = 16;
4676 break;
4677 case 2:
4678 desc1 += "2_threads";
4679 config.ThreadCount = 2;
4680 break;
4681 default:
4682 assert(0);
4683 }
4684
4685 // 0 = buffers, 1 = images, 2 = buffers and images
4686 size_t buffersVsImagesCount = 2;
4687 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4688 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4689 {
4690 std::string desc2 = desc1;
4691 switch(buffersVsImagesIndex)
4692 {
4693 case 0: desc2 += " Buffers"; break;
4694 case 1: desc2 += " Images"; break;
4695 case 2: desc2 += " Buffers+Images"; break;
4696 default: assert(0);
4697 }
4698
4699 // 0 = small, 1 = large, 2 = small and large
4700 size_t smallVsLargeCount = 2;
4701 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4702 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4703 {
4704 std::string desc3 = desc2;
4705 switch(smallVsLargeIndex)
4706 {
4707 case 0: desc3 += " Small"; break;
4708 case 1: desc3 += " Large"; break;
4709 case 2: desc3 += " Small+Large"; break;
4710 default: assert(0);
4711 }
4712
4713 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4714 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4715 else
4716                    config.PoolSize = 4ull * 1024 * 1024; // 4 MB
4717
4718 // 0 = varying sizes min...max, 1 = set of constant sizes
4719 size_t constantSizesCount = 1;
4720 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4721 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4722 {
4723 std::string desc4 = desc3;
4724 switch(constantSizesIndex)
4725 {
4726 case 0: desc4 += " Varying_sizes"; break;
4727 case 1: desc4 += " Constant_sizes"; break;
4728 default: assert(0);
4729 }
4730
4731 config.AllocationSizes.clear();
4732 // Buffers present
4733 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4734 {
4735 // Small
4736 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4737 {
4738 // Varying size
4739 if(constantSizesIndex == 0)
4740 config.AllocationSizes.push_back({4, 16, 1024});
4741 // Constant sizes
4742 else
4743 {
4744 config.AllocationSizes.push_back({1, 16, 16});
4745 config.AllocationSizes.push_back({1, 64, 64});
4746 config.AllocationSizes.push_back({1, 256, 256});
4747 config.AllocationSizes.push_back({1, 1024, 1024});
4748 }
4749 }
4750 // Large
4751 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4752 {
4753 // Varying size
4754 if(constantSizesIndex == 0)
4755 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4756 // Constant sizes
4757 else
4758 {
4759 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4760 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4761 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4762 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4763 }
4764 }
4765 }
4766 // Images present
4767 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4768 {
4769 // Small
4770 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4771 {
4772 // Varying size
4773 if(constantSizesIndex == 0)
4774 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4775 // Constant sizes
4776 else
4777 {
4778 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4779 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4780 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4781 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4782 }
4783 }
4784 // Large
4785 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4786 {
4787 // Varying size
4788 if(constantSizesIndex == 0)
4789 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4790 // Constant sizes
4791 else
4792 {
4793 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4794 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4795 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4796 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4797 }
4798 }
4799 }
4800
4801 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4802 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4803
4804                    // Pool subscription, i.e. UsedItemCountMax as a percentage of AVG_RESOURCES_PER_POOL: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%.
4805                    size_t subscriptionModeCount = 0;
4806 switch(ConfigType)
4807 {
4808 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4809 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4810 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4811 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4812 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4813 default: assert(0);
4814 }
4815 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4816 {
4817 std::string desc5 = desc4;
4818
4819 switch(subscriptionModeIndex)
4820 {
4821 case 0:
4822 desc5 += " Subscription_66%";
4823 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4824 break;
4825 case 1:
4826 desc5 += " Subscription_133%";
4827 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4828 break;
4829 case 2:
4830 desc5 += " Subscription_100%";
4831 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4832 break;
4833 case 3:
4834 desc5 += " Subscription_33%";
4835 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4836 break;
4837 case 4:
4838 desc5 += " Subscription_166%";
4839 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4840 break;
4841 default:
4842 assert(0);
4843 }
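                        // UsedItemCountMax above AVG_RESOURCES_PER_POOL (the 133% and 166% cases)
                        // intentionally oversubscribes the pool; presumably this is what drives the
                        // lost/failed allocation counters in the pool test report.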
4844
4845 config.TotalItemCount = config.UsedItemCountMax * 5;
4846 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4847
4848 const char* testDescription = desc5.c_str();
4849
4850 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4851 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004852 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004853
4854 PoolTestResult result{};
4855 g_MemoryAliasingWarningEnabled = false;
4856 TestPool_Benchmark(result, config);
4857 g_MemoryAliasingWarningEnabled = true;
4858 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4859 }
4860 }
4861 }
4862 }
4863 }
4864 }
4865}
4866
Adam Sawickia83793a2018-09-03 13:40:42 +02004867static void BasicTestBuddyAllocator()
4868{
4869 wprintf(L"Basic test buddy allocator\n");
4870
4871 RandomNumberGenerator rand{76543};
4872
4873 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4874    sampleBufCreateInfo.size = 1024; // Arbitrary; this info is only used to find a compatible memory type index below.
4875 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4876
4877 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4878 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4879
4880 VmaPoolCreateInfo poolCreateInfo = {};
4881 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004882 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004883
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004884    // Deliberately add 1023 to test a usable size that is smaller than the memory block size.
4885 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004886 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004887 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004888
4889 VmaPool pool = nullptr;
4890 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004891 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004892
4893 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4894
4895 VmaAllocationCreateInfo allocCreateInfo = {};
4896 allocCreateInfo.pool = pool;
4897
4898 std::vector<BufferInfo> bufInfo;
4899 BufferInfo newBufInfo;
4900 VmaAllocationInfo allocInfo;
4901
4902 bufCreateInfo.size = 1024 * 256;
4903 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4904 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004905 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004906 bufInfo.push_back(newBufInfo);
4907
4908 bufCreateInfo.size = 1024 * 512;
4909 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4910 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004911 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004912 bufInfo.push_back(newBufInfo);
4913
4914 bufCreateInfo.size = 1024 * 128;
4915 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4916 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004917 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004918 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004919
4920 // Test very small allocation, smaller than minimum node size.
4921 bufCreateInfo.size = 1;
4922 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4923 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004924 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004925 bufInfo.push_back(newBufInfo);
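    // Note: the buddy algorithm rounds allocation sizes up to powers of two, so even this
    // 1-byte buffer is expected to consume a whole minimum-size node.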
Adam Sawickia83793a2018-09-03 13:40:42 +02004926
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004927 // Test some small allocation with alignment requirement.
4928 {
4929 VkMemoryRequirements memReq;
4930 memReq.alignment = 256;
4931 memReq.memoryTypeBits = UINT32_MAX;
4932 memReq.size = 32;
4933
4934 newBufInfo.Buffer = VK_NULL_HANDLE;
4935 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4936 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004937 TEST(res == VK_SUCCESS);
4938 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004939 bufInfo.push_back(newBufInfo);
4940 }
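    // Note on the alignment test above: in a buddy allocator each node's offset is a multiple of
    // its power-of-two size, so the 256-byte alignment request is presumably satisfied by picking
    // a sufficiently large node.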
4941
4942 //SaveAllocatorStatsToFile(L"TEST.json");
4943
Adam Sawicki21017c62018-09-07 15:26:59 +02004944 VmaPoolStats stats = {};
4945 vmaGetPoolStats(g_hAllocator, pool, &stats);
4946 int DBG = 0; // Set breakpoint here to inspect `stats`.
4947
Adam Sawicki80927152018-09-07 17:27:23 +02004948    // Allocate enough new buffers to make sure they spill into a second memory block.
4949 for(uint32_t i = 0; i < 32; ++i)
4950 {
4951 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4952 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4953 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004954 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004955 bufInfo.push_back(newBufInfo);
4956 }
4957
4958 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4959
Adam Sawickia83793a2018-09-03 13:40:42 +02004960 // Destroy the buffers in random order.
4961 while(!bufInfo.empty())
4962 {
4963 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4964 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4965 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4966 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4967 }
4968
4969 vmaDestroyPool(g_hAllocator, pool);
4970}
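
// A hypothetical helper (not part of the original tests) that prints the VmaPoolStats which
// BasicTestBuddyAllocator above only inspects at the DBG breakpoint. vmaGetPoolStats and the
// VmaPoolStats fields used here are part of the public VMA API.
static void PrintPoolStats(VmaPool pool)
{
    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    wprintf(L"Pool stats: size=%llu B, unused=%llu B, allocations=%llu, unused ranges=%llu, largest unused range=%llu B\n",
        (unsigned long long)stats.size,
        (unsigned long long)stats.unusedSize,
        (unsigned long long)stats.allocationCount,
        (unsigned long long)stats.unusedRangeCount,
        (unsigned long long)stats.unusedRangeSizeMax);
}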
4971
Adam Sawickif2975342018-10-16 13:49:02 +02004972// Test the testing environment.
4973static void TestGpuData()
4974{
4975 RandomNumberGenerator rand = { 53434 };
4976
4977 std::vector<AllocInfo> allocInfo;
4978
4979 for(size_t i = 0; i < 100; ++i)
4980 {
4981 AllocInfo info = {};
4982
4983 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4984 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4985 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4986 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4987 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
4988
4989 VmaAllocationCreateInfo allocCreateInfo = {};
4990 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4991
4992 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
4993 TEST(res == VK_SUCCESS);
4994
4995 info.m_StartValue = rand.Generate();
4996
4997 allocInfo.push_back(std::move(info));
4998 }
4999
5000 UploadGpuData(allocInfo.data(), allocInfo.size());
5001
5002 ValidateGpuData(allocInfo.data(), allocInfo.size());
5003
5004 DestroyAllAllocations(allocInfo);
5005}
5006
Adam Sawickib8333fb2018-03-13 16:15:53 +01005007void Test()
5008{
5009 wprintf(L"TESTING:\n");
5010
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005011    if(true) // Set to false to skip these temporary custom tests and run the full suite below.
Adam Sawicki70a683e2018-08-24 15:36:32 +02005012 {
5013 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005014 // ########################################
5015 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02005016
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005017 TestDefragmentationSimple();
5018 TestDefragmentationFull();
5019 TestDefragmentationGpu();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005020 TestDefragmentationWholePool();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005021 return;
5022 }
5023
Adam Sawickib8333fb2018-03-13 16:15:53 +01005024 // # Simple tests
5025
5026 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005027 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005028#if VMA_DEBUG_MARGIN
5029 TestDebugMargin();
5030#else
5031 TestPool_SameSize();
5032 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005033 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005034#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005035#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5036 TestAllocationsInitialization();
5037#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005038 TestMapping();
5039 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005040 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005041 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005042 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005043
Adam Sawicki4338f662018-09-07 14:12:37 +02005044 BasicTestBuddyAllocator();
5045
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005046 {
5047 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005048 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005049 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005050 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005051 fclose(file);
5052 }
5053
Adam Sawickib8333fb2018-03-13 16:15:53 +01005054 TestDefragmentationSimple();
5055 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005056 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005057 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005058
5059 // # Detailed tests
5060 FILE* file;
5061 fopen_s(&file, "Results.csv", "w");
5062 assert(file != NULL);
5063
5064 WriteMainTestResultHeader(file);
5065 PerformMainTests(file);
5066 //PerformCustomMainTest(file);
5067
5068 WritePoolTestResultHeader(file);
5069 PerformPoolTests(file);
5070 //PerformCustomPoolTest(file);
5071
5072 fclose(file);
5073
5074 wprintf(L"Done.\n");
5075}
5076
Adam Sawickif1a793c2018-03-13 15:42:22 +01005077#endif // #ifdef _WIN32