blob: 058fedb5a7674d2c01a5ad1fa85150c637334536 [file] [log] [blame]
Adam Sawickif1a793c2018-03-13 15:42:22 +01001#include "Tests.h"
2#include "VmaUsage.h"
3#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004#include <atomic>
5#include <thread>
6#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +01007
8#ifdef _WIN32
9
Adam Sawicki33d2ce72018-08-27 13:59:13 +020010static const char* CODE_DESCRIPTION = "Foo";
11
Adam Sawickif2975342018-10-16 13:49:02 +020012extern VkCommandBuffer g_hTemporaryCommandBuffer;
13void BeginSingleTimeCommands();
14void EndSingleTimeCommands();
15
Adam Sawickibdb89a92018-12-13 11:56:30 +010016#ifndef VMA_DEBUG_MARGIN
17 #define VMA_DEBUG_MARGIN 0
18#endif
19
// Size/intensity preset for the whole test suite. Larger presets enable more
// configurations per test (see GetAllocationStrategyCount()).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT // Number of presets, not itself a valid preset.
};
28
// Globally selected test intensity preset. Swap to the commented-out line
// below for a heavier run.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020031
Adam Sawickib8333fb2018-03-13 16:15:53 +010032enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
33
// Printable names for FREE_ORDER values, indexed by the enum (COUNT excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
39
Adam Sawicki80927152018-09-07 17:27:23 +020040// Copy of internal VmaAlgorithmToStr.
41static const char* AlgorithmToStr(uint32_t algorithm)
42{
43 switch(algorithm)
44 {
45 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
46 return "Linear";
47 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
48 return "Buddy";
49 case 0:
50 return "Default";
51 default:
52 assert(0);
53 return "";
54 }
55}
56
// One weighted entry of an allocation-size distribution. An entry describes
// either buffers (BufferSizeMin/Max, in bytes) or images (ImageSizeMin/Max,
// 2D dimensions) — consumers treat BufferSizeMax > 0 as "buffer entry".
struct AllocationSize
{
    uint32_t Probability; // Relative weight among all entries in the vector.
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};
63
// Parameters of one MainTest() run.
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;  // Bytes allocated up-front, split evenly across threads.
    uint32_t AdditionalOperationCount;  // Random alloc/free operations, split across threads.
    VkDeviceSize MaxBytesToAllocate;    // Per-run cap on live bytes during the random phase.
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes; // Weighted size distribution.
    uint32_t ThreadCount;
    // Percent chance that an allocation goes into the shared (cross-thread) list.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
77
// Aggregated measurements of one MainTest() run.
struct Result
{
    duration TotalTime;
    // Per-operation timings. The *Avg fields accumulate sums during the run
    // and are divided by the allocation count at the end of MainTest().
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated; // used + unused bytes from vmaCalculateStats.
    // Free-range statistics captured when all threads reached their allocation cap.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};
86
87void TestDefragmentationSimple();
88void TestDefragmentationFull();
89
90struct PoolTestConfig
91{
92 uint32_t RandSeed;
93 uint32_t ThreadCount;
94 VkDeviceSize PoolSize;
95 uint32_t FrameCount;
96 uint32_t TotalItemCount;
97 // Range for number of items used in each frame.
98 uint32_t UsedItemCountMin, UsedItemCountMax;
99 // Percent of items to make unused, and possibly make some others used in each frame.
100 uint32_t ItemsToMakeUnusedPercent;
101 std::vector<AllocationSize> AllocationSizes;
102
103 VkDeviceSize CalcAvgResourceSize() const
104 {
105 uint32_t probabilitySum = 0;
106 VkDeviceSize sizeSum = 0;
107 for(size_t i = 0; i < AllocationSizes.size(); ++i)
108 {
109 const AllocationSize& allocSize = AllocationSizes[i];
110 if(allocSize.BufferSizeMax > 0)
111 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
112 else
113 {
114 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
115 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
116 }
117 probabilitySum += allocSize.Probability;
118 }
119 return sizeSum / probabilitySum;
120 }
121
122 bool UsesBuffers() const
123 {
124 for(size_t i = 0; i < AllocationSizes.size(); ++i)
125 if(AllocationSizes[i].BufferSizeMax > 0)
126 return true;
127 return false;
128 }
129
130 bool UsesImages() const
131 {
132 for(size_t i = 0; i < AllocationSizes.size(); ++i)
133 if(AllocationSizes[i].ImageSizeMax > 0)
134 return true;
135 return false;
136 }
137};
138
// Aggregated measurements of one pool test run.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // "Lost" refers to VMA's lost-allocation mechanism; sizes are in bytes.
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
147
// Assumed byte cost of one image texel when estimating memory budgets in
// MainTest (width * height * IMAGE_BYTES_PER_PIXEL).
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame counter shared by tests.
// NOTE(review): presumably fed to vmaSetCurrentFrameIndex by frame-based
// tests elsewhere in this file — confirm against the callers.
uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200151
// A buffer paired with its VMA allocation; both VK_NULL_HANDLE until created.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
157
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200158static uint32_t GetAllocationStrategyCount()
159{
160 uint32_t strategyCount = 0;
161 switch(ConfigType)
162 {
163 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
164 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
165 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
166 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
167 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
168 default: assert(0);
169 }
170 return strategyCount;
171}
172
173static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
174{
175 switch(allocStrategy)
176 {
177 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
178 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
179 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
180 case 0: return "Default"; break;
181 default: assert(0); return "";
182 }
183}
184
Adam Sawickib8333fb2018-03-13 16:15:53 +0100185static void InitResult(Result& outResult)
186{
187 outResult.TotalTime = duration::zero();
188 outResult.AllocationTimeMin = duration::max();
189 outResult.AllocationTimeAvg = duration::zero();
190 outResult.AllocationTimeMax = duration::min();
191 outResult.DeallocationTimeMin = duration::max();
192 outResult.DeallocationTimeAvg = duration::zero();
193 outResult.DeallocationTimeMax = duration::min();
194 outResult.TotalMemoryAllocated = 0;
195 outResult.FreeRangeSizeAvg = 0;
196 outResult.FreeRangeSizeMax = 0;
197}
198
199class TimeRegisterObj
200{
201public:
202 TimeRegisterObj(duration& min, duration& sum, duration& max) :
203 m_Min(min),
204 m_Sum(sum),
205 m_Max(max),
206 m_TimeBeg(std::chrono::high_resolution_clock::now())
207 {
208 }
209
210 ~TimeRegisterObj()
211 {
212 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
213 m_Sum += d;
214 if(d < m_Min) m_Min = d;
215 if(d > m_Max) m_Max = d;
216 }
217
218private:
219 duration& m_Min;
220 duration& m_Sum;
221 duration& m_Max;
222 time_point m_TimeBeg;
223};
224
// Per-thread timing and counter accumulators for a pool test.
// Note these hold raw *Sum timings, unlike PoolTestResult's *Avg fields.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
233
// RAII scope timer that records into a Result's allocation-time fields.
// AllocationTimeAvg temporarily holds the running sum; MainTest divides it
// by the allocation count at the end.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
242
// RAII scope timer that records into a Result's deallocation-time fields.
// DeallocationTimeAvg temporarily holds the running sum; MainTest divides it
// by the allocation count at the end.
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
251
// RAII scope timer that records into a PoolTestThreadResult's
// allocation-time min/sum/max fields.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
260
// RAII scope timer that records into a PoolTestThreadResult's
// deallocation-time min/sum/max fields.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
269
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200270static void CurrentTimeToStr(std::string& out)
271{
272 time_t rawTime; time(&rawTime);
273 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
274 char timeStr[128];
275 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
276 out = timeStr;
277}
278
// Multithreaded allocation stress test driven by `config`:
// 1. Each of config.ThreadCount threads allocates random buffers/images up to
//    its share of BeginBytesToAllocate.
// 2. Each thread then performs its share of AdditionalOperationCount random
//    alloc/free operations, capped at its share of MaxBytesToAllocate.
// 3. Memory statistics are sampled while all allocations are live, then all
//    threads free everything in config.FreeOrder.
// Timings and statistics are accumulated into outResult (the *Avg fields are
// sums during the run and divided by the total allocation count at the end).
// Returns the last VkResult produced by VMA (VK_SUCCESS if nothing failed).
// Windows-only: synchronizes via a Win32 manual-reset event.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    // Total weight of the four VMA_MEMORY_USAGE_* buckets; used to pick one
    // proportionally below.
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    // Total weight of all size-distribution entries.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // One live resource: exactly one of Buffer/Image is non-null.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads, guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent non-zero)
    // with a randomly chosen memory usage, timing the VMA call, and appends it
    // to either the shared list or the caller's per-thread list.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer size / image extent must be set.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted pick of a VMA_MEMORY_USAGE_* bucket.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                // Time only the VMA call itself.
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images use optimal tiling; host-visible usages need
            // linear tiling.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Pick usage flags plausible for the chosen memory bucket.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                // Time only the VMA call itself.
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            // Route a configurable percentage of allocations to the shared
            // list so threads free each other's allocations.
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            // Any allocation failure fails the whole test.
            TEST(0);
        }
        return res;
    };

    // Weighted pick from config.AllocationSizes: produces either a buffer
    // byte size (rounded down to a multiple of 16) or square-ish image
    // dimensions; the other output stays zero.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                // Align down to 16 bytes.
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event: signaled by the main thread once statistics have
    // been sampled, releasing all workers into their deallocation phase.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global byte/operation budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free either from the shared list (under the mutex) or from
                // this thread's private list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Avoid underflowing this thread's byte counter when
                        // freeing an allocation another thread paid for.
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Hold all allocations live until the main thread has sampled stats.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): uses mainRand (shared, unsynchronized) rather
                // than threadRand here — confirm this is intentional.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Turn accumulated sums into averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
645
// Builds the allocator's stats string (with the detailed map, VK_TRUE) and
// writes it to the given file path.
void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}
653
// One test resource: a buffer OR an image together with its VMA allocation
// and the first value of the sequential pattern written into it.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0; // First uint32 of the fill pattern.
    // Creation info of whichever resource is present.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};
671
// Creates the buffer and its allocation from the given descriptions, storing
// the buffer create-info in the union for later size queries. Fails the test
// on any error.
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
680
// Destroys whichever resource exists (image and/or buffer) with the raw
// Vulkan destroy calls, then frees the VMA allocation separately.
// Safe to call on a partially-initialized or already-cleared AllocInfo.
void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}
696
// Pool of persistently-mapped CPU-side staging buffers, reused across
// transfers. Total size of all buffers (used + unused) is capped at
// MAX_TOTAL_SIZE.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks all buffers as unused for reuse; does not destroy them.
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    // One owned staging buffer.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr; // Persistent mapping (MAPPED_BIT).
        bool Used = false;         // Currently handed out by AcquireBuffer.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
720
721StagingBufferCollection::~StagingBufferCollection()
722{
723 for(size_t i = m_Bufs.size(); i--; )
724 {
725 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
726 }
727}
728
// Hands out a staging buffer of at least `size` bytes: reuses the smallest
// fitting unused buffer if one exists, otherwise creates a new persistently
// mapped CPU buffer while the MAX_TOTAL_SIZE budget allows. If the budget is
// exhausted but unused (too-small) buffers exist, destroys them and retries
// once via recursion. Returns false only when nothing can satisfy the request
// (all buffers in use and budget exhausted).
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // CPU-side, persistently mapped for direct writes/reads.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Reverse iteration keeps indices valid across erase().
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Recursion depth is bounded: after the purge there are no unused
        // buffers left, so the retry either succeeds or returns false.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
810
811void StagingBufferCollection::ReleaseAllBuffers()
812{
813 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
814 {
815 m_Bufs[i].Used = false;
816 }
817}
818
// Uploads each buffer's sequential fill pattern (starting at m_StartValue) to
// the GPU: writes the pattern into a mapped staging buffer, then records a
// copy into the destination buffer. Copies are batched into one single-time
// command buffer until the staging budget runs out, at which point the batch
// is submitted and the staging buffers recycled. Image allocations are not
// supported and fail the test.
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging budget exhausted: submit pending copies, recycle
                // the staging buffers, and retry — which must succeed now.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit the final batch, if any.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
882
// Reads every buffer back through staging buffers and checks it still holds
// the sequential pattern starting at m_StartValue. Copy commands are batched;
// whenever the staging budget runs out (and again at the end), the pending
// batch is submitted and the staging contents are validated before the
// buffers are recycled. Image allocations are not supported and fail the
// test.
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    // Index of the first allocation whose staging copy is still pending validation.
    size_t validateAllocIndexOffset = 0;
    // Mapped pointers of pending staging buffers, parallel to allocations
    // [validateAllocIndexOffset, ...].
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging budget exhausted: submit pending copies, then
                // validate their staging contents before recycling.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit and validate the final batch, if any.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}
989
Adam Sawickib8333fb2018-03-13 16:15:53 +0100990static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
991{
992 outMemReq = {};
993 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
994 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
995}
996
// Creates a buffer in the given pool, records it in outAllocInfo, and fills
// it with a sequential uint32 pattern starting at a random m_StartValue so
// its contents can be validated later. If persistentlyMapped, the allocation
// is created with VMA_ALLOCATION_CREATE_MAPPED_BIT and written through the
// persistent mapping; otherwise it is mapped/unmapped around the fill.
static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        // pMappedData must be non-null exactly when MAPPED_BIT was requested.
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}
1033
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001034static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001035{
1036 outAllocation.m_Allocation = nullptr;
1037 outAllocation.m_Buffer = nullptr;
1038 outAllocation.m_Image = nullptr;
1039 outAllocation.m_StartValue = (uint32_t)rand();
1040
1041 VmaAllocationCreateInfo vmaMemReq;
1042 GetMemReq(vmaMemReq);
1043
1044 VmaAllocationInfo allocInfo;
1045
1046 const bool isBuffer = true;//(rand() & 0x1) != 0;
1047 const bool isLarge = (rand() % 16) == 0;
1048 if(isBuffer)
1049 {
1050 const uint32_t bufferSize = isLarge ?
1051 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1052 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1053
1054 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1055 bufferInfo.size = bufferSize;
1056 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1057
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001058 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001059 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001060 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001061 }
1062 else
1063 {
1064 const uint32_t imageSizeX = isLarge ?
1065 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1066 rand() % 1024 + 1; // 1 ... 1024
1067 const uint32_t imageSizeY = isLarge ?
1068 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1069 rand() % 1024 + 1; // 1 ... 1024
1070
1071 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1072 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1073 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1074 imageInfo.extent.width = imageSizeX;
1075 imageInfo.extent.height = imageSizeY;
1076 imageInfo.extent.depth = 1;
1077 imageInfo.mipLevels = 1;
1078 imageInfo.arrayLayers = 1;
1079 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1080 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1081 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1082 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1083
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001084 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001085 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001086 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001087 }
1088
1089 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1090 if(allocInfo.pMappedData == nullptr)
1091 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001092 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001093 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001094 }
1095
1096 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001097 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001098 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1099 data[i] = value++;
1100
1101 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001102 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001103}
1104
1105static void DestroyAllocation(const AllocInfo& allocation)
1106{
1107 if(allocation.m_Buffer)
1108 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1109 else
1110 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1111}
1112
1113static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1114{
1115 for(size_t i = allocations.size(); i--; )
1116 DestroyAllocation(allocations[i]);
1117 allocations.clear();
1118}
1119
1120static void ValidateAllocationData(const AllocInfo& allocation)
1121{
1122 VmaAllocationInfo allocInfo;
1123 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1124
1125 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1126 if(allocInfo.pMappedData == nullptr)
1127 {
1128 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001129 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001130 }
1131
1132 uint32_t value = allocation.m_StartValue;
1133 bool ok = true;
1134 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001135 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001136 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1137 {
1138 if(data[i] != value++)
1139 {
1140 ok = false;
1141 break;
1142 }
1143 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001144 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001145
1146 if(allocInfo.pMappedData == nullptr)
1147 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1148}
1149
/*
After a defragmentation moved the allocation's memory, the old VkBuffer/VkImage
still points at the previous location. This destroys the stale resource,
creates a fresh one from the stored create-info, and binds it to the
allocation's new deviceMemory + offset obtained from vmaGetAllocationInfo().
Note: order is essential - info must be queried after the move, and the new
resource must be bound before any use.
*/
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}
1185
1186static void Defragment(AllocInfo* allocs, size_t allocCount,
1187 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1188 VmaDefragmentationStats* defragmentationStats = nullptr)
1189{
1190 std::vector<VmaAllocation> vmaAllocs(allocCount);
1191 for(size_t i = 0; i < allocCount; ++i)
1192 vmaAllocs[i] = allocs[i].m_Allocation;
1193
1194 std::vector<VkBool32> allocChanged(allocCount);
1195
1196 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1197 defragmentationInfo, defragmentationStats) );
1198
1199 for(size_t i = 0; i < allocCount; ++i)
1200 {
1201 if(allocChanged[i])
1202 {
1203 RecreateAllocationResource(allocs[i]);
1204 }
1205 }
1206}
1207
1208static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1209{
1210 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1211 ValidateAllocationData(allocInfo);
1212 });
1213}
1214
1215void TestDefragmentationSimple()
1216{
1217 wprintf(L"Test defragmentation simple\n");
1218
1219 RandomNumberGenerator rand(667);
1220
1221 const VkDeviceSize BUF_SIZE = 0x10000;
1222 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1223
1224 const VkDeviceSize MIN_BUF_SIZE = 32;
1225 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1226 auto RandomBufSize = [&]() -> VkDeviceSize {
1227 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1228 };
1229
1230 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1231 bufCreateInfo.size = BUF_SIZE;
1232 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1233
1234 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1235 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1236
1237 uint32_t memTypeIndex = UINT32_MAX;
1238 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1239
1240 VmaPoolCreateInfo poolCreateInfo = {};
1241 poolCreateInfo.blockSize = BLOCK_SIZE;
1242 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1243
1244 VmaPool pool;
1245 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1246
Adam Sawickie1681912018-11-23 17:50:12 +01001247 // Defragmentation of empty pool.
1248 {
1249 VmaDefragmentationInfo2 defragInfo = {};
1250 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1251 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1252 defragInfo.poolCount = 1;
1253 defragInfo.pPools = &pool;
1254
1255 VmaDefragmentationStats defragStats = {};
1256 VmaDefragmentationContext defragCtx = nullptr;
1257 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1258 TEST(res >= VK_SUCCESS);
1259 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1260 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1261 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1262 }
1263
Adam Sawickib8333fb2018-03-13 16:15:53 +01001264 std::vector<AllocInfo> allocations;
1265
1266 // persistentlyMappedOption = 0 - not persistently mapped.
1267 // persistentlyMappedOption = 1 - persistently mapped.
1268 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1269 {
1270 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1271 const bool persistentlyMapped = persistentlyMappedOption != 0;
1272
1273 // # Test 1
1274 // Buffers of fixed size.
1275 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1276 // Expected result: at least 1 block freed.
1277 {
1278 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1279 {
1280 AllocInfo allocInfo;
1281 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1282 allocations.push_back(allocInfo);
1283 }
1284
1285 for(size_t i = 1; i < allocations.size(); ++i)
1286 {
1287 DestroyAllocation(allocations[i]);
1288 allocations.erase(allocations.begin() + i);
1289 }
1290
1291 VmaDefragmentationStats defragStats;
1292 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001293 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1294 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001295
1296 ValidateAllocationsData(allocations.data(), allocations.size());
1297
1298 DestroyAllAllocations(allocations);
1299 }
1300
1301 // # Test 2
1302 // Buffers of fixed size.
1303 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1304 // Expected result: Each of 4 interations makes some progress.
1305 {
1306 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1307 {
1308 AllocInfo allocInfo;
1309 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1310 allocations.push_back(allocInfo);
1311 }
1312
1313 for(size_t i = 1; i < allocations.size(); ++i)
1314 {
1315 DestroyAllocation(allocations[i]);
1316 allocations.erase(allocations.begin() + i);
1317 }
1318
1319 VmaDefragmentationInfo defragInfo = {};
1320 defragInfo.maxAllocationsToMove = 1;
1321 defragInfo.maxBytesToMove = BUF_SIZE;
1322
1323 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1324 {
1325 VmaDefragmentationStats defragStats;
1326 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001327 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001328 }
1329
1330 ValidateAllocationsData(allocations.data(), allocations.size());
1331
1332 DestroyAllAllocations(allocations);
1333 }
1334
1335 // # Test 3
1336 // Buffers of variable size.
1337 // Create a number of buffers. Remove some percent of them.
1338 // Defragment while having some percent of them unmovable.
1339 // Expected result: Just simple validation.
1340 {
1341 for(size_t i = 0; i < 100; ++i)
1342 {
1343 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1344 localBufCreateInfo.size = RandomBufSize();
1345
1346 AllocInfo allocInfo;
1347 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1348 allocations.push_back(allocInfo);
1349 }
1350
1351 const uint32_t percentToDelete = 60;
1352 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1353 for(size_t i = 0; i < numberToDelete; ++i)
1354 {
1355 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1356 DestroyAllocation(allocations[indexToDelete]);
1357 allocations.erase(allocations.begin() + indexToDelete);
1358 }
1359
1360 // Non-movable allocations will be at the beginning of allocations array.
1361 const uint32_t percentNonMovable = 20;
1362 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1363 for(size_t i = 0; i < numberNonMovable; ++i)
1364 {
1365 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1366 if(indexNonMovable != i)
1367 std::swap(allocations[i], allocations[indexNonMovable]);
1368 }
1369
1370 VmaDefragmentationStats defragStats;
1371 Defragment(
1372 allocations.data() + numberNonMovable,
1373 allocations.size() - numberNonMovable,
1374 nullptr, &defragStats);
1375
1376 ValidateAllocationsData(allocations.data(), allocations.size());
1377
1378 DestroyAllAllocations(allocations);
1379 }
1380 }
1381
Adam Sawicki647cf242018-11-23 17:58:00 +01001382 /*
1383 Allocation that must be move to an overlapping place using memmove().
1384 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1385 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001386 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001387 {
1388 AllocInfo allocInfo[2];
1389
1390 bufCreateInfo.size = BUF_SIZE;
1391 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1392 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1393 bufCreateInfo.size = biggerBufSize;
1394 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1395
1396 DestroyAllocation(allocInfo[0]);
1397
1398 VmaDefragmentationStats defragStats;
1399 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1400 // If this fails, it means we couldn't do memmove with overlapping regions.
1401 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1402
1403 ValidateAllocationsData(&allocInfo[1], 1);
1404 DestroyAllocation(allocInfo[1]);
1405 }
1406
Adam Sawickib8333fb2018-03-13 16:15:53 +01001407 vmaDestroyPool(g_hAllocator, pool);
1408}
1409
/*
Tests that defragmenting a whole pool (VmaDefragmentationInfo2::pPools,
case 0) produces exactly the same statistics as passing the very same
allocations explicitly (pAllocations, case 1). Both cases rebuild an
identical fragmented pool, defragment it, and the resulting
VmaDefragmentationStats are compared field by field at the end.
*/
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8; // Each block fits up to 8 buffers.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    // [0] = defragment by pool, [1] = defragment by explicit allocation list.
    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        // Erase while incrementing: removes every second element.
        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            // Case 0: let VMA defragment everything in the pool.
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            // Case 1: pass the same set of allocations explicitly.
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    // Both variants must have done exactly the same work.
    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}
1496
/*
Stress test: creates 400 random allocations via CreateAllocation(), deletes
80% of them at random, defragments the remaining set with vmaDefragment()
without limits, recreates moved resources, and validates data integrity
before and after.
*/
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    // Sanity check before defragmentation.
    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // NOTE(review): if nonMovablePercent is ever raised above 0, the
        // index-based lookup below (allocationsChanged[i] ->
        // RecreateAllocationResource(allocations[i])) becomes wrong, because
        // erasing from vmaAllocations shifts its indices relative to
        // `allocations`. Safe today only because nonMovableCount == 0.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits - move as much as possible.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Recreate and rebind resources whose memory was moved.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1580
/*
Tests GPU-side defragmentation (moves recorded into a command buffer):
- fills ~3 x 256 MB of GPU_ONLY buffers of random size,
- destroys 70% of them to fragment the memory,
- uploads a known data pattern,
- defragments only allocations tagged as movable (pUserData == 1; a small
  percentage is tagged 2 = non-movable),
- recreates moved resources, validates the data, and dumps allocator stats
  to JSON before and after.
*/
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    // Aliasing warnings are expected during defragmentation moves - mute them.
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        // pUserData encodes movability: 2 = non-movable, 1 = movable.
        if(rand.Generate() % 100 < percentNonMovable)
        {
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see different color in output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        // Maps an index in allocationPtrs back to its index in `allocations`.
        std::vector<size_t> allocationOriginalIndex;

        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        // Record the defragmentation copies into the temporary command buffer.
        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = 0;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        // Submits the command buffer and waits for completion.
        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        for(size_t i = 0; i < movableAllocCount; ++i)
        {
            if(allocationChanged[i])
            {
                const size_t origAllocIndex = allocationOriginalIndex[i];
                RecreateAllocationResource(allocations[origAllocIndex]);
            }
        }

        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
    }

    ValidateGpuData(allocations.data(), allocations.size());

    swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
    SaveAllocatorStatsToFile(fileName);

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}
1717
Adam Sawickib8333fb2018-03-13 16:15:53 +01001718static void TestUserData()
1719{
1720 VkResult res;
1721
1722 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1723 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1724 bufCreateInfo.size = 0x10000;
1725
1726 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1727 {
1728 // Opaque pointer
1729 {
1730
1731 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1732 void* pointerToSomething = &res;
1733
1734 VmaAllocationCreateInfo allocCreateInfo = {};
1735 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1736 allocCreateInfo.pUserData = numberAsPointer;
1737 if(testIndex == 1)
1738 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1739
1740 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1741 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001742 TEST(res == VK_SUCCESS);
1743 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001744
1745 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001746 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001747
1748 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1749 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001750 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001751
1752 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1753 }
1754
1755 // String
1756 {
1757 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1758 const char* name2 = "2";
1759 const size_t name1Len = strlen(name1);
1760
1761 char* name1Buf = new char[name1Len + 1];
1762 strcpy_s(name1Buf, name1Len + 1, name1);
1763
1764 VmaAllocationCreateInfo allocCreateInfo = {};
1765 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1766 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1767 allocCreateInfo.pUserData = name1Buf;
1768 if(testIndex == 1)
1769 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1770
1771 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1772 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001773 TEST(res == VK_SUCCESS);
1774 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1775 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001776
1777 delete[] name1Buf;
1778
1779 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001780 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001781
1782 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1783 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001784 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001785
1786 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1787 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001788 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001789
1790 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1791 }
1792 }
1793}
1794
Adam Sawicki370ab182018-11-08 16:31:00 +01001795static void TestInvalidAllocations()
1796{
1797 VkResult res;
1798
1799 VmaAllocationCreateInfo allocCreateInfo = {};
1800 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1801
1802 // Try to allocate 0 bytes.
1803 {
1804 VkMemoryRequirements memReq = {};
1805 memReq.size = 0; // !!!
1806 memReq.alignment = 4;
1807 memReq.memoryTypeBits = UINT32_MAX;
1808 VmaAllocation alloc = VK_NULL_HANDLE;
1809 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1810 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1811 }
1812
1813 // Try to create buffer with size = 0.
1814 {
1815 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1816 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1817 bufCreateInfo.size = 0; // !!!
1818 VkBuffer buf = VK_NULL_HANDLE;
1819 VmaAllocation alloc = VK_NULL_HANDLE;
1820 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1821 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1822 }
1823
1824 // Try to create image with one dimension = 0.
1825 {
1826 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1827 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1828 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1829 imageCreateInfo.extent.width = 128;
1830 imageCreateInfo.extent.height = 0; // !!!
1831 imageCreateInfo.extent.depth = 1;
1832 imageCreateInfo.mipLevels = 1;
1833 imageCreateInfo.arrayLayers = 1;
1834 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1835 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1836 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1837 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1838 VkImage image = VK_NULL_HANDLE;
1839 VmaAllocation alloc = VK_NULL_HANDLE;
1840 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1841 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1842 }
1843}
1844
Adam Sawickib8333fb2018-03-13 16:15:53 +01001845static void TestMemoryRequirements()
1846{
1847 VkResult res;
1848 VkBuffer buf;
1849 VmaAllocation alloc;
1850 VmaAllocationInfo allocInfo;
1851
1852 const VkPhysicalDeviceMemoryProperties* memProps;
1853 vmaGetMemoryProperties(g_hAllocator, &memProps);
1854
1855 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1856 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1857 bufInfo.size = 128;
1858
1859 VmaAllocationCreateInfo allocCreateInfo = {};
1860
1861 // No requirements.
1862 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001863 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001864 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1865
1866 // Usage.
1867 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1868 allocCreateInfo.requiredFlags = 0;
1869 allocCreateInfo.preferredFlags = 0;
1870 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1871
1872 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001873 TEST(res == VK_SUCCESS);
1874 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001875 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1876
1877 // Required flags, preferred flags.
1878 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1879 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1880 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1881 allocCreateInfo.memoryTypeBits = 0;
1882
1883 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001884 TEST(res == VK_SUCCESS);
1885 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1886 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001887 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1888
1889 // memoryTypeBits.
1890 const uint32_t memType = allocInfo.memoryType;
1891 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1892 allocCreateInfo.requiredFlags = 0;
1893 allocCreateInfo.preferredFlags = 0;
1894 allocCreateInfo.memoryTypeBits = 1u << memType;
1895
1896 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001897 TEST(res == VK_SUCCESS);
1898 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001899 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1900
1901}
1902
1903static void TestBasics()
1904{
1905 VkResult res;
1906
1907 TestMemoryRequirements();
1908
1909 // Lost allocation
1910 {
1911 VmaAllocation alloc = VK_NULL_HANDLE;
1912 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001913 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001914
1915 VmaAllocationInfo allocInfo;
1916 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001917 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1918 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001919
1920 vmaFreeMemory(g_hAllocator, alloc);
1921 }
1922
1923 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1924 {
1925 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1926 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1927 bufCreateInfo.size = 128;
1928
1929 VmaAllocationCreateInfo allocCreateInfo = {};
1930 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1931 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1932
1933 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1934 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001935 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001936
1937 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1938
1939 // Same with OWN_MEMORY.
1940 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1941
1942 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001943 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001944
1945 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1946 }
1947
1948 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001949
1950 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001951}
1952
1953void TestHeapSizeLimit()
1954{
1955 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1956 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1957
1958 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1959 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1960 {
1961 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1962 }
1963
1964 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1965 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1966 allocatorCreateInfo.device = g_hDevice;
1967 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1968
1969 VmaAllocator hAllocator;
1970 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001972
1973 struct Item
1974 {
1975 VkBuffer hBuf;
1976 VmaAllocation hAlloc;
1977 };
1978 std::vector<Item> items;
1979
1980 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1981 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1982
1983 // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
1984 VmaAllocationInfo ownAllocInfo;
1985 {
1986 VmaAllocationCreateInfo allocCreateInfo = {};
1987 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1988 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1989
1990 bufCreateInfo.size = BLOCK_SIZE / 2;
1991
1992 for(size_t i = 0; i < 2; ++i)
1993 {
1994 Item item;
1995 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001996 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001997 items.push_back(item);
1998 }
1999 }
2000
2001 // Create pool to make sure allocations must be out of this memory type.
2002 VmaPoolCreateInfo poolCreateInfo = {};
2003 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2004 poolCreateInfo.blockSize = BLOCK_SIZE;
2005
2006 VmaPool hPool;
2007 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002008 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002009
2010 // 2. Allocate normal buffers from all the remaining memory.
2011 {
2012 VmaAllocationCreateInfo allocCreateInfo = {};
2013 allocCreateInfo.pool = hPool;
2014
2015 bufCreateInfo.size = BLOCK_SIZE / 2;
2016
2017 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2018 for(size_t i = 0; i < bufCount; ++i)
2019 {
2020 Item item;
2021 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002022 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002023 items.push_back(item);
2024 }
2025 }
2026
2027 // 3. Allocation of one more (even small) buffer should fail.
2028 {
2029 VmaAllocationCreateInfo allocCreateInfo = {};
2030 allocCreateInfo.pool = hPool;
2031
2032 bufCreateInfo.size = 128;
2033
2034 VkBuffer hBuf;
2035 VmaAllocation hAlloc;
2036 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002037 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002038 }
2039
2040 // Destroy everything.
2041 for(size_t i = items.size(); i--; )
2042 {
2043 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2044 }
2045
2046 vmaDestroyPool(hAllocator, hPool);
2047
2048 vmaDestroyAllocator(hAllocator);
2049}
2050
#if VMA_DEBUG_MARGIN
// Verifies that VMA_DEBUG_MARGIN bytes are reserved before each allocation and
// between neighboring allocations, and that vmaCheckCorruption reports no
// corruption of those margins.
static void TestDebugMargin()
{
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    // Fixed: loop bound was a hard-coded 10, which would go out of bounds
    // if BUF_COUNT were ever reduced.
    for(size_t i = 0; i < BUF_COUNT; ++i)
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill with data.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Check if their offsets preserve margin between them.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002116
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002117static void TestLinearAllocator()
2118{
2119 wprintf(L"Test linear allocator\n");
2120
2121 RandomNumberGenerator rand{645332};
2122
2123 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2124 sampleBufCreateInfo.size = 1024; // Whatever.
2125 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2126
2127 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2128 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2129
2130 VmaPoolCreateInfo poolCreateInfo = {};
2131 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002132 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002133
Adam Sawickiee082772018-06-20 17:45:49 +02002134 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002135 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2136 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2137
2138 VmaPool pool = nullptr;
2139 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002140 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002141
2142 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2143
2144 VmaAllocationCreateInfo allocCreateInfo = {};
2145 allocCreateInfo.pool = pool;
2146
2147 constexpr size_t maxBufCount = 100;
2148 std::vector<BufferInfo> bufInfo;
2149
2150 constexpr VkDeviceSize bufSizeMin = 16;
2151 constexpr VkDeviceSize bufSizeMax = 1024;
2152 VmaAllocationInfo allocInfo;
2153 VkDeviceSize prevOffset = 0;
2154
2155 // Test one-time free.
2156 for(size_t i = 0; i < 2; ++i)
2157 {
2158 // Allocate number of buffers of varying size that surely fit into this block.
2159 VkDeviceSize bufSumSize = 0;
2160 for(size_t i = 0; i < maxBufCount; ++i)
2161 {
2162 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2163 BufferInfo newBufInfo;
2164 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2165 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002166 TEST(res == VK_SUCCESS);
2167 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002168 bufInfo.push_back(newBufInfo);
2169 prevOffset = allocInfo.offset;
2170 bufSumSize += bufCreateInfo.size;
2171 }
2172
2173 // Validate pool stats.
2174 VmaPoolStats stats;
2175 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002176 TEST(stats.size == poolCreateInfo.blockSize);
2177 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2178 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002179
2180 // Destroy the buffers in random order.
2181 while(!bufInfo.empty())
2182 {
2183 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2184 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2185 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2186 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2187 }
2188 }
2189
2190 // Test stack.
2191 {
2192 // Allocate number of buffers of varying size that surely fit into this block.
2193 for(size_t i = 0; i < maxBufCount; ++i)
2194 {
2195 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2196 BufferInfo newBufInfo;
2197 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2198 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002199 TEST(res == VK_SUCCESS);
2200 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002201 bufInfo.push_back(newBufInfo);
2202 prevOffset = allocInfo.offset;
2203 }
2204
2205 // Destroy few buffers from top of the stack.
2206 for(size_t i = 0; i < maxBufCount / 5; ++i)
2207 {
2208 const BufferInfo& currBufInfo = bufInfo.back();
2209 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2210 bufInfo.pop_back();
2211 }
2212
2213 // Create some more
2214 for(size_t i = 0; i < maxBufCount / 5; ++i)
2215 {
2216 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2217 BufferInfo newBufInfo;
2218 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2219 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002220 TEST(res == VK_SUCCESS);
2221 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002222 bufInfo.push_back(newBufInfo);
2223 prevOffset = allocInfo.offset;
2224 }
2225
2226 // Destroy the buffers in reverse order.
2227 while(!bufInfo.empty())
2228 {
2229 const BufferInfo& currBufInfo = bufInfo.back();
2230 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2231 bufInfo.pop_back();
2232 }
2233 }
2234
Adam Sawickiee082772018-06-20 17:45:49 +02002235 // Test ring buffer.
2236 {
2237 // Allocate number of buffers that surely fit into this block.
2238 bufCreateInfo.size = bufSizeMax;
2239 for(size_t i = 0; i < maxBufCount; ++i)
2240 {
2241 BufferInfo newBufInfo;
2242 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2243 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002244 TEST(res == VK_SUCCESS);
2245 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002246 bufInfo.push_back(newBufInfo);
2247 prevOffset = allocInfo.offset;
2248 }
2249
2250 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2251 const size_t buffersPerIter = maxBufCount / 10 - 1;
2252 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2253 for(size_t iter = 0; iter < iterCount; ++iter)
2254 {
2255 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2256 {
2257 const BufferInfo& currBufInfo = bufInfo.front();
2258 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2259 bufInfo.erase(bufInfo.begin());
2260 }
2261 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2262 {
2263 BufferInfo newBufInfo;
2264 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2265 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002266 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002267 bufInfo.push_back(newBufInfo);
2268 }
2269 }
2270
2271 // Allocate buffers until we reach out-of-memory.
2272 uint32_t debugIndex = 0;
2273 while(res == VK_SUCCESS)
2274 {
2275 BufferInfo newBufInfo;
2276 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2277 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2278 if(res == VK_SUCCESS)
2279 {
2280 bufInfo.push_back(newBufInfo);
2281 }
2282 else
2283 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002284 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002285 }
2286 ++debugIndex;
2287 }
2288
2289 // Destroy the buffers in random order.
2290 while(!bufInfo.empty())
2291 {
2292 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2293 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2294 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2295 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2296 }
2297 }
2298
Adam Sawicki680b2252018-08-22 14:47:32 +02002299 // Test double stack.
2300 {
2301 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
2302 VkDeviceSize prevOffsetLower = 0;
2303 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2304 for(size_t i = 0; i < maxBufCount; ++i)
2305 {
2306 const bool upperAddress = (i % 2) != 0;
2307 if(upperAddress)
2308 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2309 else
2310 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2311 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2312 BufferInfo newBufInfo;
2313 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2314 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002315 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002316 if(upperAddress)
2317 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002318 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002319 prevOffsetUpper = allocInfo.offset;
2320 }
2321 else
2322 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002323 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002324 prevOffsetLower = allocInfo.offset;
2325 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002326 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002327 bufInfo.push_back(newBufInfo);
2328 }
2329
2330 // Destroy few buffers from top of the stack.
2331 for(size_t i = 0; i < maxBufCount / 5; ++i)
2332 {
2333 const BufferInfo& currBufInfo = bufInfo.back();
2334 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2335 bufInfo.pop_back();
2336 }
2337
2338 // Create some more
2339 for(size_t i = 0; i < maxBufCount / 5; ++i)
2340 {
2341 const bool upperAddress = (i % 2) != 0;
2342 if(upperAddress)
2343 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2344 else
2345 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2346 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2347 BufferInfo newBufInfo;
2348 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2349 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002350 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002351 bufInfo.push_back(newBufInfo);
2352 }
2353
2354 // Destroy the buffers in reverse order.
2355 while(!bufInfo.empty())
2356 {
2357 const BufferInfo& currBufInfo = bufInfo.back();
2358 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2359 bufInfo.pop_back();
2360 }
2361
2362 // Create buffers on both sides until we reach out of memory.
2363 prevOffsetLower = 0;
2364 prevOffsetUpper = poolCreateInfo.blockSize;
2365 res = VK_SUCCESS;
2366 for(size_t i = 0; res == VK_SUCCESS; ++i)
2367 {
2368 const bool upperAddress = (i % 2) != 0;
2369 if(upperAddress)
2370 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2371 else
2372 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2373 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2374 BufferInfo newBufInfo;
2375 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2376 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2377 if(res == VK_SUCCESS)
2378 {
2379 if(upperAddress)
2380 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002381 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002382 prevOffsetUpper = allocInfo.offset;
2383 }
2384 else
2385 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002386 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002387 prevOffsetLower = allocInfo.offset;
2388 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002389 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002390 bufInfo.push_back(newBufInfo);
2391 }
2392 }
2393
2394 // Destroy the buffers in random order.
2395 while(!bufInfo.empty())
2396 {
2397 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2398 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2399 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2400 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2401 }
2402
2403 // Create buffers on upper side only, constant size, until we reach out of memory.
2404 prevOffsetUpper = poolCreateInfo.blockSize;
2405 res = VK_SUCCESS;
2406 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2407 bufCreateInfo.size = bufSizeMax;
2408 for(size_t i = 0; res == VK_SUCCESS; ++i)
2409 {
2410 BufferInfo newBufInfo;
2411 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2412 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2413 if(res == VK_SUCCESS)
2414 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002415 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002416 prevOffsetUpper = allocInfo.offset;
2417 bufInfo.push_back(newBufInfo);
2418 }
2419 }
2420
2421 // Destroy the buffers in reverse order.
2422 while(!bufInfo.empty())
2423 {
2424 const BufferInfo& currBufInfo = bufInfo.back();
2425 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2426 bufInfo.pop_back();
2427 }
2428 }
2429
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002430 // Test ring buffer with lost allocations.
2431 {
2432 // Allocate number of buffers until pool is full.
2433 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2434 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2435 res = VK_SUCCESS;
2436 for(size_t i = 0; res == VK_SUCCESS; ++i)
2437 {
2438 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2439
2440 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2441
2442 BufferInfo newBufInfo;
2443 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2444 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2445 if(res == VK_SUCCESS)
2446 bufInfo.push_back(newBufInfo);
2447 }
2448
2449 // Free first half of it.
2450 {
2451 const size_t buffersToDelete = bufInfo.size() / 2;
2452 for(size_t i = 0; i < buffersToDelete; ++i)
2453 {
2454 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2455 }
2456 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2457 }
2458
2459 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002460 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002461 res = VK_SUCCESS;
2462 for(size_t i = 0; res == VK_SUCCESS; ++i)
2463 {
2464 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2465
2466 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2467
2468 BufferInfo newBufInfo;
2469 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2470 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2471 if(res == VK_SUCCESS)
2472 bufInfo.push_back(newBufInfo);
2473 }
2474
2475 VkDeviceSize firstNewOffset;
2476 {
2477 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2478
2479 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2480 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2481 bufCreateInfo.size = bufSizeMax;
2482
2483 BufferInfo newBufInfo;
2484 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2485 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002486 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002487 bufInfo.push_back(newBufInfo);
2488 firstNewOffset = allocInfo.offset;
2489
2490 // Make sure at least one buffer from the beginning became lost.
2491 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002492 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002493 }
2494
2495 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2496 size_t newCount = 1;
2497 for(;;)
2498 {
2499 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2500
2501 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2502
2503 BufferInfo newBufInfo;
2504 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2505 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002506 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002507 bufInfo.push_back(newBufInfo);
2508 ++newCount;
2509 if(allocInfo.offset < firstNewOffset)
2510 break;
2511 }
2512
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002513 // Delete buffers that are lost.
2514 for(size_t i = bufInfo.size(); i--; )
2515 {
2516 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2517 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2518 {
2519 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2520 bufInfo.erase(bufInfo.begin() + i);
2521 }
2522 }
2523
2524 // Test vmaMakePoolAllocationsLost
2525 {
2526 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2527
2528 size_t lostAllocCount = SIZE_MAX;
2529 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002530 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002531
2532 size_t realLostAllocCount = 0;
2533 for(size_t i = 0; i < bufInfo.size(); ++i)
2534 {
2535 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2536 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2537 ++realLostAllocCount;
2538 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002539 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002540 }
2541
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002542 // Destroy all the buffers in forward order.
2543 for(size_t i = 0; i < bufInfo.size(); ++i)
2544 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2545 bufInfo.clear();
2546 }
2547
Adam Sawicki70a683e2018-08-24 15:36:32 +02002548 vmaDestroyPool(g_hAllocator, pool);
2549}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002550
Adam Sawicki70a683e2018-08-24 15:36:32 +02002551static void TestLinearAllocatorMultiBlock()
2552{
2553 wprintf(L"Test linear allocator multi block\n");
2554
2555 RandomNumberGenerator rand{345673};
2556
2557 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2558 sampleBufCreateInfo.size = 1024 * 1024;
2559 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2560
2561 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2562 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2563
2564 VmaPoolCreateInfo poolCreateInfo = {};
2565 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2566 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002567 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002568
2569 VmaPool pool = nullptr;
2570 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002571 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002572
2573 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2574
2575 VmaAllocationCreateInfo allocCreateInfo = {};
2576 allocCreateInfo.pool = pool;
2577
2578 std::vector<BufferInfo> bufInfo;
2579 VmaAllocationInfo allocInfo;
2580
2581 // Test one-time free.
2582 {
2583 // Allocate buffers until we move to a second block.
2584 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2585 for(uint32_t i = 0; ; ++i)
2586 {
2587 BufferInfo newBufInfo;
2588 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2589 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002590 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002591 bufInfo.push_back(newBufInfo);
2592 if(lastMem && allocInfo.deviceMemory != lastMem)
2593 {
2594 break;
2595 }
2596 lastMem = allocInfo.deviceMemory;
2597 }
2598
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002600
2601 // Make sure that pool has now two blocks.
2602 VmaPoolStats poolStats = {};
2603 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002604 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002605
2606 // Destroy all the buffers in random order.
2607 while(!bufInfo.empty())
2608 {
2609 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2610 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2611 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2612 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2613 }
2614
2615 // Make sure that pool has now at most one block.
2616 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002617 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002618 }
2619
2620 // Test stack.
2621 {
2622 // Allocate buffers until we move to a second block.
2623 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2624 for(uint32_t i = 0; ; ++i)
2625 {
2626 BufferInfo newBufInfo;
2627 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2628 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002629 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002630 bufInfo.push_back(newBufInfo);
2631 if(lastMem && allocInfo.deviceMemory != lastMem)
2632 {
2633 break;
2634 }
2635 lastMem = allocInfo.deviceMemory;
2636 }
2637
Adam Sawickib8d34d52018-10-03 17:41:20 +02002638 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002639
2640 // Add few more buffers.
2641 for(uint32_t i = 0; i < 5; ++i)
2642 {
2643 BufferInfo newBufInfo;
2644 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2645 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002646 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002647 bufInfo.push_back(newBufInfo);
2648 }
2649
2650 // Make sure that pool has now two blocks.
2651 VmaPoolStats poolStats = {};
2652 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002653 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002654
2655 // Delete half of buffers, LIFO.
2656 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2657 {
2658 const BufferInfo& currBufInfo = bufInfo.back();
2659 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2660 bufInfo.pop_back();
2661 }
2662
2663 // Add one more buffer.
2664 BufferInfo newBufInfo;
2665 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2666 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002667 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002668 bufInfo.push_back(newBufInfo);
2669
2670 // Make sure that pool has now one block.
2671 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002672 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002673
2674 // Delete all the remaining buffers, LIFO.
2675 while(!bufInfo.empty())
2676 {
2677 const BufferInfo& currBufInfo = bufInfo.back();
2678 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2679 bufInfo.pop_back();
2680 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002681 }
2682
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002683 vmaDestroyPool(g_hAllocator, pool);
2684}
2685
Adam Sawickifd11d752018-08-22 15:02:10 +02002686static void ManuallyTestLinearAllocator()
2687{
2688 VmaStats origStats;
2689 vmaCalculateStats(g_hAllocator, &origStats);
2690
2691 wprintf(L"Manually test linear allocator\n");
2692
2693 RandomNumberGenerator rand{645332};
2694
2695 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2696 sampleBufCreateInfo.size = 1024; // Whatever.
2697 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2698
2699 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2700 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2701
2702 VmaPoolCreateInfo poolCreateInfo = {};
2703 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002704 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002705
2706 poolCreateInfo.blockSize = 10 * 1024;
2707 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2708 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2709
2710 VmaPool pool = nullptr;
2711 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002712 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002713
2714 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2715
2716 VmaAllocationCreateInfo allocCreateInfo = {};
2717 allocCreateInfo.pool = pool;
2718
2719 std::vector<BufferInfo> bufInfo;
2720 VmaAllocationInfo allocInfo;
2721 BufferInfo newBufInfo;
2722
2723 // Test double stack.
2724 {
2725 /*
2726 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2727 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2728
2729 Totally:
2730 1 block allocated
2731 10240 Vulkan bytes
2732 6 new allocations
2733 2256 bytes in allocations
2734 */
2735
2736 bufCreateInfo.size = 32;
2737 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2738 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002739 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002740 bufInfo.push_back(newBufInfo);
2741
2742 bufCreateInfo.size = 1024;
2743 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2744 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002745 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002746 bufInfo.push_back(newBufInfo);
2747
2748 bufCreateInfo.size = 32;
2749 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2750 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002751 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002752 bufInfo.push_back(newBufInfo);
2753
2754 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2755
2756 bufCreateInfo.size = 128;
2757 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2758 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002759 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002760 bufInfo.push_back(newBufInfo);
2761
2762 bufCreateInfo.size = 1024;
2763 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2764 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002765 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002766 bufInfo.push_back(newBufInfo);
2767
2768 bufCreateInfo.size = 16;
2769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2770 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002771 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002772 bufInfo.push_back(newBufInfo);
2773
2774 VmaStats currStats;
2775 vmaCalculateStats(g_hAllocator, &currStats);
2776 VmaPoolStats poolStats;
2777 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2778
2779 char* statsStr = nullptr;
2780 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2781
2782 // PUT BREAKPOINT HERE TO CHECK.
2783 // Inspect: currStats versus origStats, poolStats, statsStr.
2784 int I = 0;
2785
2786 vmaFreeStatsString(g_hAllocator, statsStr);
2787
2788 // Destroy the buffers in reverse order.
2789 while(!bufInfo.empty())
2790 {
2791 const BufferInfo& currBufInfo = bufInfo.back();
2792 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2793 bufInfo.pop_back();
2794 }
2795 }
2796
2797 vmaDestroyPool(g_hAllocator, pool);
2798}
2799
// Runs a single benchmark configuration: repeatedly allocates and frees many
// variable-sized sub-allocations from one pool and accumulates the total
// allocation and deallocation wall-clock time. Results are printed to stdout
// and, if `file` is non-null, appended as one CSV row.
//
// algorithm     - VmaPoolCreateFlagBits algorithm bit, or 0 for default.
// empty         - if false, the pool is pre-fragmented before measuring.
// allocStrategy - VMA_ALLOCATION_CREATE_STRATEGY_* flags for each allocation.
// freeOrder     - order in which the measured allocations are freed.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block, so only sub-allocation cost is measured,
    // never creation of new VkDeviceMemory blocks.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    // Pre-fragment the pool: fill 1/3 of it, then free a random half of those
    // allocations, leaving holes that the measured allocations must work around.
    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    // Note: `duration` and `time_point` are presumably std::chrono aliases
    // from Common.h - TODO confirm.
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations
        // Reordering happens OUTSIDE the timed region on purpose, so only
        // vmaFreeMemory itself is measured.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        // clear() keeps capacity, so later iterations don't re-allocate the vector.
        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    // Human-readable summary on stdout.
    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    // Machine-readable CSV row; columns match the header written by
    // the caller's benchmark driver.
    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
2949
Adam Sawicki80927152018-09-07 17:27:23 +02002950static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002951{
Adam Sawicki80927152018-09-07 17:27:23 +02002952 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002953
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002954 if(file)
2955 {
2956 fprintf(file,
2957 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002958 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002959 "Allocation time (s),Deallocation time (s)\n");
2960 }
2961
Adam Sawicki0a607132018-08-24 11:18:41 +02002962 uint32_t freeOrderCount = 1;
2963 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2964 freeOrderCount = 3;
2965 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2966 freeOrderCount = 2;
2967
2968 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002969 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002970
2971 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2972 {
2973 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2974 switch(freeOrderIndex)
2975 {
2976 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2977 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2978 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2979 default: assert(0);
2980 }
2981
2982 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2983 {
Adam Sawicki80927152018-09-07 17:27:23 +02002984 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002985 {
Adam Sawicki80927152018-09-07 17:27:23 +02002986 uint32_t algorithm = 0;
2987 switch(algorithmIndex)
2988 {
2989 case 0:
2990 break;
2991 case 1:
2992 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2993 break;
2994 case 2:
2995 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2996 break;
2997 default:
2998 assert(0);
2999 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003000
Adam Sawicki80927152018-09-07 17:27:23 +02003001 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003002 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3003 {
3004 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003005 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003006 {
3007 switch(allocStrategyIndex)
3008 {
3009 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3010 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3011 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3012 default: assert(0);
3013 }
3014 }
3015
Adam Sawicki80927152018-09-07 17:27:23 +02003016 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003017 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003018 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003019 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003020 strategy,
3021 freeOrder); // freeOrder
3022 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003023 }
3024 }
3025 }
3026}
3027
Adam Sawickib8333fb2018-03-13 16:15:53 +01003028static void TestPool_SameSize()
3029{
3030 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3031 const size_t BUF_COUNT = 100;
3032 VkResult res;
3033
3034 RandomNumberGenerator rand{123};
3035
3036 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3037 bufferInfo.size = BUF_SIZE;
3038 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3039
3040 uint32_t memoryTypeBits = UINT32_MAX;
3041 {
3042 VkBuffer dummyBuffer;
3043 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003044 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003045
3046 VkMemoryRequirements memReq;
3047 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3048 memoryTypeBits = memReq.memoryTypeBits;
3049
3050 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3051 }
3052
3053 VmaAllocationCreateInfo poolAllocInfo = {};
3054 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3055 uint32_t memTypeIndex;
3056 res = vmaFindMemoryTypeIndex(
3057 g_hAllocator,
3058 memoryTypeBits,
3059 &poolAllocInfo,
3060 &memTypeIndex);
3061
3062 VmaPoolCreateInfo poolCreateInfo = {};
3063 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3064 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3065 poolCreateInfo.minBlockCount = 1;
3066 poolCreateInfo.maxBlockCount = 4;
3067 poolCreateInfo.frameInUseCount = 0;
3068
3069 VmaPool pool;
3070 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003071 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003072
3073 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3074
3075 VmaAllocationCreateInfo allocInfo = {};
3076 allocInfo.pool = pool;
3077 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3078 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3079
3080 struct BufItem
3081 {
3082 VkBuffer Buf;
3083 VmaAllocation Alloc;
3084 };
3085 std::vector<BufItem> items;
3086
3087 // Fill entire pool.
3088 for(size_t i = 0; i < BUF_COUNT; ++i)
3089 {
3090 BufItem item;
3091 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003092 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003093 items.push_back(item);
3094 }
3095
3096 // Make sure that another allocation would fail.
3097 {
3098 BufItem item;
3099 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003100 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003101 }
3102
3103 // Validate that no buffer is lost. Also check that they are not mapped.
3104 for(size_t i = 0; i < items.size(); ++i)
3105 {
3106 VmaAllocationInfo allocInfo;
3107 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003108 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3109 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003110 }
3111
3112 // Free some percent of random items.
3113 {
3114 const size_t PERCENT_TO_FREE = 10;
3115 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3116 for(size_t i = 0; i < itemsToFree; ++i)
3117 {
3118 size_t index = (size_t)rand.Generate() % items.size();
3119 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3120 items.erase(items.begin() + index);
3121 }
3122 }
3123
3124 // Randomly allocate and free items.
3125 {
3126 const size_t OPERATION_COUNT = BUF_COUNT;
3127 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3128 {
3129 bool allocate = rand.Generate() % 2 != 0;
3130 if(allocate)
3131 {
3132 if(items.size() < BUF_COUNT)
3133 {
3134 BufItem item;
3135 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003136 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003137 items.push_back(item);
3138 }
3139 }
3140 else // Free
3141 {
3142 if(!items.empty())
3143 {
3144 size_t index = (size_t)rand.Generate() % items.size();
3145 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3146 items.erase(items.begin() + index);
3147 }
3148 }
3149 }
3150 }
3151
3152 // Allocate up to maximum.
3153 while(items.size() < BUF_COUNT)
3154 {
3155 BufItem item;
3156 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003157 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003158 items.push_back(item);
3159 }
3160
3161 // Validate that no buffer is lost.
3162 for(size_t i = 0; i < items.size(); ++i)
3163 {
3164 VmaAllocationInfo allocInfo;
3165 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003166 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003167 }
3168
3169 // Next frame.
3170 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3171
3172 // Allocate another BUF_COUNT buffers.
3173 for(size_t i = 0; i < BUF_COUNT; ++i)
3174 {
3175 BufItem item;
3176 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003177 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003178 items.push_back(item);
3179 }
3180
3181 // Make sure the first BUF_COUNT is lost. Delete them.
3182 for(size_t i = 0; i < BUF_COUNT; ++i)
3183 {
3184 VmaAllocationInfo allocInfo;
3185 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003186 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003187 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3188 }
3189 items.erase(items.begin(), items.begin() + BUF_COUNT);
3190
3191 // Validate that no buffer is lost.
3192 for(size_t i = 0; i < items.size(); ++i)
3193 {
3194 VmaAllocationInfo allocInfo;
3195 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003196 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003197 }
3198
3199 // Free one item.
3200 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3201 items.pop_back();
3202
3203 // Validate statistics.
3204 {
3205 VmaPoolStats poolStats = {};
3206 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003207 TEST(poolStats.allocationCount == items.size());
3208 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
3209 TEST(poolStats.unusedRangeCount == 1);
3210 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3211 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003212 }
3213
3214 // Free all remaining items.
3215 for(size_t i = items.size(); i--; )
3216 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3217 items.clear();
3218
3219 // Allocate maximum items again.
3220 for(size_t i = 0; i < BUF_COUNT; ++i)
3221 {
3222 BufItem item;
3223 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003224 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003225 items.push_back(item);
3226 }
3227
3228 // Delete every other item.
3229 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3230 {
3231 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3232 items.erase(items.begin() + i);
3233 }
3234
3235 // Defragment!
3236 {
3237 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3238 for(size_t i = 0; i < items.size(); ++i)
3239 allocationsToDefragment[i] = items[i].Alloc;
3240
3241 VmaDefragmentationStats defragmentationStats;
3242 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003243 TEST(res == VK_SUCCESS);
3244 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003245 }
3246
3247 // Free all remaining items.
3248 for(size_t i = items.size(); i--; )
3249 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3250 items.clear();
3251
3252 ////////////////////////////////////////////////////////////////////////////////
3253 // Test for vmaMakePoolAllocationsLost
3254
3255 // Allocate 4 buffers on frame 10.
3256 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3257 for(size_t i = 0; i < 4; ++i)
3258 {
3259 BufItem item;
3260 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003261 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003262 items.push_back(item);
3263 }
3264
3265 // Touch first 2 of them on frame 11.
3266 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3267 for(size_t i = 0; i < 2; ++i)
3268 {
3269 VmaAllocationInfo allocInfo;
3270 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3271 }
3272
3273 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3274 size_t lostCount = 0xDEADC0DE;
3275 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003276 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003277
3278 // Make another call. Now 0 should be lost.
3279 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003280 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003281
3282 // Make another call, with null count. Should not crash.
3283 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3284
3285 // END: Free all remaining items.
3286 for(size_t i = items.size(); i--; )
3287 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3288
3289 items.clear();
3290
Adam Sawickid2924172018-06-11 12:48:46 +02003291 ////////////////////////////////////////////////////////////////////////////////
3292 // Test for allocation too large for pool
3293
3294 {
3295 VmaAllocationCreateInfo allocCreateInfo = {};
3296 allocCreateInfo.pool = pool;
3297
3298 VkMemoryRequirements memReq;
3299 memReq.memoryTypeBits = UINT32_MAX;
3300 memReq.alignment = 1;
3301 memReq.size = poolCreateInfo.blockSize + 4;
3302
3303 VmaAllocation alloc = nullptr;
3304 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003305 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003306 }
3307
Adam Sawickib8333fb2018-03-13 16:15:53 +01003308 vmaDestroyPool(g_hAllocator, pool);
3309}
3310
Adam Sawickib0c36362018-11-13 16:17:38 +01003311static void TestResize()
3312{
3313 wprintf(L"Testing vmaResizeAllocation...\n");
3314
3315 const VkDeviceSize KILOBYTE = 1024ull;
3316 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3317
3318 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3319 bufCreateInfo.size = 2 * MEGABYTE;
3320 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3321
3322 VmaAllocationCreateInfo allocCreateInfo = {};
3323 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3324
3325 uint32_t memTypeIndex = UINT32_MAX;
3326 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3327
3328 VmaPoolCreateInfo poolCreateInfo = {};
3329 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3330 poolCreateInfo.blockSize = 8 * MEGABYTE;
3331 poolCreateInfo.minBlockCount = 1;
3332 poolCreateInfo.maxBlockCount = 1;
3333 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3334
3335 VmaPool pool;
3336 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3337
3338 allocCreateInfo.pool = pool;
3339
3340 // Fill 8 MB pool with 4 * 2 MB allocations.
3341 VmaAllocation allocs[4] = {};
3342
3343 VkMemoryRequirements memReq = {};
3344 memReq.memoryTypeBits = UINT32_MAX;
3345 memReq.alignment = 4;
3346 memReq.size = bufCreateInfo.size;
3347
3348 VmaAllocationInfo allocInfo = {};
3349
3350 for(uint32_t i = 0; i < 4; ++i)
3351 {
3352 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3353 }
3354
3355 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3356
3357 // Case: Resize to the same size always succeeds.
3358 {
3359 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3360 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3361 TEST(allocInfo.size == 2ull * 1024 * 1024);
3362 }
3363
3364 // Case: Shrink allocation at the end.
3365 {
3366 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3367 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3368 TEST(allocInfo.size == 1ull * 1024 * 1024);
3369 }
3370
3371 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3372
3373 // Case: Shrink allocation before free space.
3374 {
3375 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3376 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3377 TEST(allocInfo.size == 512 * KILOBYTE);
3378 }
3379
3380 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3381
3382 // Case: Shrink allocation before next allocation.
3383 {
3384 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3385 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3386 TEST(allocInfo.size == 1 * MEGABYTE);
3387 }
3388
3389 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3390
3391 // Case: Grow allocation while there is even more space available.
3392 {
3393 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3394 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3395 TEST(allocInfo.size == 1 * MEGABYTE);
3396 }
3397
3398 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3399
3400 // Case: Grow allocation while there is exact amount of free space available.
3401 {
3402 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3403 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3404 TEST(allocInfo.size == 2 * MEGABYTE);
3405 }
3406
3407 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3408
3409 // Case: Fail to grow when there is not enough free space due to next allocation.
3410 {
3411 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3412 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3413 TEST(allocInfo.size == 2 * MEGABYTE);
3414 }
3415
3416 // Case: Fail to grow when there is not enough free space due to end of memory block.
3417 {
3418 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3419 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3420 TEST(allocInfo.size == 1 * MEGABYTE);
3421 }
3422
3423 for(uint32_t i = 4; i--; )
3424 {
3425 vmaFreeMemory(g_hAllocator, allocs[i]);
3426 }
3427
3428 vmaDestroyPool(g_hAllocator, pool);
3429
3430 // Test dedicated allocation
3431 {
3432 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3433 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3434 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3435
3436 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3437 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3438
3439 // Case: Resize to the same size always succeeds.
3440 {
3441 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3442 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3443 TEST(allocInfo.size == 2ull * 1024 * 1024);
3444 }
3445
3446 // Case: Shrinking fails.
3447 {
3448 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3449 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3450 TEST(allocInfo.size == 2ull * 1024 * 1024);
3451 }
3452
3453 // Case: Growing fails.
3454 {
3455 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3456 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3457 TEST(allocInfo.size == 2ull * 1024 * 1024);
3458 }
3459
3460 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3461 }
3462}
3463
// Returns true if every byte of the given memory region equals `pattern`.
// An empty region (size == 0) trivially validates as true.
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* pCurr = static_cast<const uint8_t*>(pMemory);
    const uint8_t* const pEnd = pCurr + size;
    while(pCurr != pEnd)
    {
        if(*pCurr++ != pattern)
            return false;
    }
    return true;
}
3476
// Verifies that allocation memory is filled with a known debug pattern on
// creation (0xDC) and another on destruction (0xEF).
// NOTE(review): the 0xDC/0xEF values presumably match VMA's debug
// initialization feature (VMA_DEBUG_INITIALIZE_ALLOCATIONS) — confirm this
// test is only run with that configuration enabled.
static void TestAllocationsInitialization()
{
    VkResult res;

    const size_t BUF_SIZE = 1024;

    // Create pool.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = BUF_SIZE;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // Dummy create-info only used to pick a CPU-visible memory type for the pool.
    VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
    dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BUF_SIZE * 10;
    poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
    poolCreateInfo.maxBlockCount = 1;
    res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaAllocationCreateInfo bufAllocCreateInfo = {};
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
    TEST(res == VK_SUCCESS);

    // Create one persistently mapped buffer to keep memory of this block mapped,
    // so that pointer to mapped data will remain (more or less...) valid even
    // after destruction of other allocations.

    bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    VkBuffer firstBuf;
    VmaAllocation firstAlloc;
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
    TEST(res == VK_SUCCESS);

    // Test buffers.

    // Iteration 0 tests a persistently mapped buffer, iteration 1 a manually
    // mapped one.
    for(uint32_t i = 0; i < 2; ++i)
    {
        const bool persistentlyMapped = i == 0;
        bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
        VkBuffer buf;
        VmaAllocation alloc;
        VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        void* pMappedData;
        if(!persistentlyMapped)
        {
            res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
            TEST(res == VK_SUCCESS);
        }
        else
        {
            pMappedData = allocInfo.pMappedData;
        }

        // Validate initialized content
        bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
        TEST(valid);

        if(!persistentlyMapped)
        {
            vmaUnmapMemory(g_hAllocator, alloc);
        }

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Validate freed content
        // NOTE(review): pMappedData is read after the buffer is destroyed; this
        // relies on firstBuf keeping the whole block mapped (see comment above).
        valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
        TEST(valid);
    }

    vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
    vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
}
3555
// Multi-threaded benchmark of allocations in a single custom VmaPool with
// lost-allocation support. Spawns config.ThreadCount worker threads; each
// frame is driven from the main thread via paired Win32 events
// (frameStartEvent/frameEndEvent). Per-thread timing and lost/failed
// allocation statistics are merged into outResult at the end.
static void TestPool_Benchmark(
    PoolTestResult& outResult,
    const PoolTestConfig& config)
{
    TEST(config.ThreadCount > 0);

    RandomNumberGenerator mainRand{config.RandSeed};

    // Sum of all probability weights, used to pick an AllocationSize bucket
    // proportionally to its Probability.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 256; // Whatever.
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = 256; // Whatever.
    imageInfo.extent.height = 256; // Whatever.
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    // Query memory type bits for a representative buffer via a throwaway VkBuffer.
    uint32_t bufferMemoryTypeBits = UINT32_MAX;
    {
        VkBuffer dummyBuffer;
        VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
        bufferMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
    }

    // Same for a representative image.
    uint32_t imageMemoryTypeBits = UINT32_MAX;
    {
        VkImage dummyImage;
        VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
        imageMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyImage(g_hDevice, dummyImage, nullptr);
    }

    // Intersect the type bits if both buffers and images share the pool;
    // bail out gracefully if the GPU has no common memory type.
    uint32_t memoryTypeBits = 0;
    if(config.UsesBuffers() && config.UsesImages())
    {
        memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
        if(memoryTypeBits == 0)
        {
            PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
            return;
        }
    }
    else if(config.UsesBuffers())
        memoryTypeBits = bufferMemoryTypeBits;
    else if(config.UsesImages())
        memoryTypeBits = imageMemoryTypeBits;
    else
        TEST(0);

    // Single fixed-size block so that allocation pressure causes lost
    // allocations rather than new block creation.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = 0;
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 1;
    poolCreateInfo.blockSize = config.PoolSize;
    poolCreateInfo.frameInUseCount = 1;

    VmaAllocationCreateInfo dummyAllocCreateInfo = {};
    dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);

    VmaPool pool;
    VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Start time measurement - after creating pool and initializing data structures.
    time_point timeBeg = std::chrono::high_resolution_clock::now();

    ////////////////////////////////////////////////////////////////////////////////
    // ThreadProc
    // Worker body: creates its share of items, then per frame waits for
    // frameStartEvent, churns allocations, and signals frameEndEvent.
    auto ThreadProc = [&](
        PoolTestThreadResult* outThreadResult,
        uint32_t randSeed,
        HANDLE frameStartEvent,
        HANDLE frameEndEvent) -> void
    {
        RandomNumberGenerator threadRand{randSeed};

        outThreadResult->AllocationTimeMin = duration::max();
        outThreadResult->AllocationTimeSum = duration::zero();
        outThreadResult->AllocationTimeMax = duration::min();
        outThreadResult->DeallocationTimeMin = duration::max();
        outThreadResult->DeallocationTimeSum = duration::zero();
        outThreadResult->DeallocationTimeMax = duration::min();
        outThreadResult->AllocationCount = 0;
        outThreadResult->DeallocationCount = 0;
        outThreadResult->LostAllocationCount = 0;
        outThreadResult->LostAllocationTotalSize = 0;
        outThreadResult->FailedAllocationCount = 0;
        outThreadResult->FailedAllocationTotalSize = 0;

        // One benchmark item: either a buffer (BufferSize != 0) or an image.
        struct Item
        {
            VkDeviceSize BufferSize;
            VkExtent2D ImageSize;
            VkBuffer Buf;
            VkImage Image;
            VmaAllocation Alloc;

            VkDeviceSize CalcSizeBytes() const
            {
                return BufferSize +
                    ImageSize.width * ImageSize.height * 4;
            }
        };
        std::vector<Item> unusedItems, usedItems;

        const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;

        // Create all items - all unused, not yet allocated.
        for(size_t i = 0; i < threadTotalItemCount; ++i)
        {
            Item item = {};

            // Weighted random choice of an AllocationSize bucket.
            uint32_t allocSizeIndex = 0;
            uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
            while(r >= config.AllocationSizes[allocSizeIndex].Probability)
                r -= config.AllocationSizes[allocSizeIndex++].Probability;

            const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
            if(allocSize.BufferSizeMax > 0)
            {
                TEST(allocSize.BufferSizeMin > 0);
                TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
                if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                    item.BufferSize = allocSize.BufferSizeMin;
                else
                {
                    // Round buffer size down to a multiple of 16.
                    item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                    item.BufferSize = item.BufferSize / 16 * 16;
                }
            }
            else
            {
                TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
                if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                    item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
                else
                {
                    item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                    item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                }
            }

            unusedItems.push_back(item);
        }

        // Creates the buffer or image for an item, timing the allocation.
        // Allocations may become lost (CAN_BECOME_LOST) and may evict others
        // (CAN_MAKE_OTHER_LOST).
        auto Allocate = [&](Item& item) -> VkResult
        {
            VmaAllocationCreateInfo allocCreateInfo = {};
            allocCreateInfo.pool = pool;
            allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
                VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

            if(item.BufferSize)
            {
                bufferInfo.size = item.BufferSize;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
            }
            else
            {
                TEST(item.ImageSize.width && item.ImageSize.height);

                imageInfo.extent.width = item.ImageSize.width;
                imageInfo.extent.height = item.ImageSize.height;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
            }
        };

        ////////////////////////////////////////////////////////////////////////////////
        // Frames
        for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
        {
            WaitForSingleObject(frameStartEvent, INFINITE);

            // Always make some percent of used bufs unused, to choose different used ones.
            const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
            for(size_t i = 0; i < bufsToMakeUnused; ++i)
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }

            // Determine which bufs we want to use in this frame.
            const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
                / config.ThreadCount;
            TEST(usedBufCount < usedItems.size() + unusedItems.size());
            // Move some used to unused.
            while(usedBufCount < usedItems.size())
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }
            // Move some unused to used.
            while(usedBufCount > usedItems.size())
            {
                size_t index = threadRand.Generate() % unusedItems.size();
                usedItems.push_back(unusedItems[index]);
                unusedItems.erase(unusedItems.begin() + index);
            }

            uint32_t touchExistingCount = 0;
            uint32_t touchLostCount = 0;
            uint32_t createSucceededCount = 0;
            uint32_t createFailedCount = 0;

            // Touch all used bufs. If not created or lost, allocate.
            for(size_t i = 0; i < usedItems.size(); ++i)
            {
                Item& item = usedItems[i];
                // Not yet created.
                if(item.Alloc == VK_NULL_HANDLE)
                {
                    res = Allocate(item);
                    ++outThreadResult->AllocationCount;
                    if(res != VK_SUCCESS)
                    {
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;
                        ++outThreadResult->FailedAllocationCount;
                        outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                        ++createFailedCount;
                    }
                    else
                        ++createSucceededCount;
                }
                else
                {
                    // Touch.
                    VmaAllocationInfo allocInfo;
                    vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
                    // Lost.
                    // NOTE(review): null deviceMemory is used here as the
                    // "allocation became lost" indicator.
                    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
                    {
                        ++touchLostCount;

                        // Destroy.
                        {
                            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                            if(item.Buf)
                                vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
                            ++outThreadResult->DeallocationCount;
                        }
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;

                        ++outThreadResult->LostAllocationCount;
                        outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();

                        // Recreate.
                        res = Allocate(item);
                        ++outThreadResult->AllocationCount;
                        // Creation failed.
                        if(res != VK_SUCCESS)
                        {
                            ++outThreadResult->FailedAllocationCount;
                            outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                            ++createFailedCount;
                        }
                        else
                            ++createSucceededCount;
                    }
                    else
                        ++touchExistingCount;
                }
            }

            /*
            printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
                randSeed, frameIndex,
                touchExistingCount, touchLostCount,
                createSucceededCount, createFailedCount);
            */

            SetEvent(frameEndEvent);
        }

        // Free all remaining items.
        for(size_t i = usedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
            if(usedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
        for(size_t i = unusedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
            if(unusedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
    };

    // Launch threads.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<HANDLE> frameStartEvents{config.ThreadCount};
    std::vector<HANDLE> frameEndEvents{config.ThreadCount};
    std::vector<std::thread> bkgThreads;
    std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
    for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        // Auto-reset events (bManualReset = FALSE): one start/end pair per thread.
        frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        bkgThreads.emplace_back(std::bind(
            ThreadProc,
            &threadResults[threadIndex],
            threadRandSeed + threadIndex,
            frameStartEvents[threadIndex],
            frameEndEvents[threadIndex]));
    }

    // Execute frames.
    // WaitForMultipleObjects can wait on at most MAXIMUM_WAIT_OBJECTS handles.
    TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
    for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
    {
        vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
        for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
            SetEvent(frameStartEvents[threadIndex]);
        WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
    }

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
    {
        bkgThreads[i].join();
        CloseHandle(frameEndEvents[i]);
        CloseHandle(frameStartEvents[i]);
    }
    bkgThreads.clear();

    // Finish time measurement - before destroying pool.
    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    vmaDestroyPool(g_hAllocator, pool);

    // Merge per-thread statistics into the aggregate result.
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.LostAllocationCount = 0;
    outResult.LostAllocationTotalSize = 0;
    outResult.FailedAllocationCount = 0;
    outResult.FailedAllocationTotalSize = 0;
    size_t allocationCount = 0;
    size_t deallocationCount = 0;
    for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        const PoolTestThreadResult& threadResult = threadResults[threadIndex];
        outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
        outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
        outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
        outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
        outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
        outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
        allocationCount += threadResult.AllocationCount;
        deallocationCount += threadResult.DeallocationCount;
        outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
        outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
        outResult.LostAllocationCount += threadResult.LostAllocationCount;
        outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
    }
    // Sums were accumulated above; divide to get averages.
    if(allocationCount)
        outResult.AllocationTimeAvg /= allocationCount;
    if(deallocationCount)
        outResult.DeallocationTimeAvg /= deallocationCount;
}
3961
// Returns true if the half-open memory regions [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) overlap. Identical start pointers are reported as
// overlapping regardless of sizes (matches the original behavior).
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Regions overlap iff the lower one extends past the start of the higher one.
    return (ptr1 < ptr2) ? (ptr1 + size1 > ptr2) : (ptr2 + size2 > ptr1);
}
3971
// Tests vmaMapMemory/vmaUnmapMemory reference counting and the
// VMA_ALLOCATION_CREATE_MAPPED_BIT (persistently mapped) flag, for three
// scenarios: default allocations, a custom pool, and dedicated allocations.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type discovered during the first (TEST_NORMAL) iteration and
    // reused to create the custom pool in the TEST_POOL iteration.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers.
        // (Third slot is filled later with a persistently mapped buffer.)
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not persistently mapped, so no mapped pointer yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte of the mapped range.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time.
        // Second map must return the same pointer (ref-counted mapping).
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        // Distinct allocations must map to non-overlapping memory.
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times.
        // Balancing both maps drops the mapping completely.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2.
        // Manual map on a persistently mapped allocation returns the same pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2.
        // Persistent mapping survives the matching unmap of the manual map.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
4076
4077static void TestMappingMultithreaded()
4078{
4079 wprintf(L"Testing mapping multithreaded...\n");
4080
4081 static const uint32_t threadCount = 16;
4082 static const uint32_t bufferCount = 1024;
4083 static const uint32_t threadBufferCount = bufferCount / threadCount;
4084
4085 VkResult res;
4086 volatile uint32_t memTypeIndex = UINT32_MAX;
4087
4088 enum TEST
4089 {
4090 TEST_NORMAL,
4091 TEST_POOL,
4092 TEST_DEDICATED,
4093 TEST_COUNT
4094 };
4095 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4096 {
4097 VmaPool pool = nullptr;
4098 if(testIndex == TEST_POOL)
4099 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004100 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004101 VmaPoolCreateInfo poolInfo = {};
4102 poolInfo.memoryTypeIndex = memTypeIndex;
4103 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004104 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004105 }
4106
4107 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4108 bufCreateInfo.size = 0x10000;
4109 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4110
4111 VmaAllocationCreateInfo allocCreateInfo = {};
4112 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4113 allocCreateInfo.pool = pool;
4114 if(testIndex == TEST_DEDICATED)
4115 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4116
4117 std::thread threads[threadCount];
4118 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4119 {
4120 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4121 // ======== THREAD FUNCTION ========
4122
4123 RandomNumberGenerator rand{threadIndex};
4124
4125 enum class MODE
4126 {
4127 // Don't map this buffer at all.
4128 DONT_MAP,
4129 // Map and quickly unmap.
4130 MAP_FOR_MOMENT,
4131 // Map and unmap before destruction.
4132 MAP_FOR_LONGER,
4133 // Map two times. Quickly unmap, second unmap before destruction.
4134 MAP_TWO_TIMES,
4135 // Create this buffer as persistently mapped.
4136 PERSISTENTLY_MAPPED,
4137 COUNT
4138 };
4139 std::vector<BufferInfo> bufInfos{threadBufferCount};
4140 std::vector<MODE> bufModes{threadBufferCount};
4141
4142 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4143 {
4144 BufferInfo& bufInfo = bufInfos[bufferIndex];
4145 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4146 bufModes[bufferIndex] = mode;
4147
4148 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4149 if(mode == MODE::PERSISTENTLY_MAPPED)
4150 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4151
4152 VmaAllocationInfo allocInfo;
4153 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4154 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004155 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004156
4157 if(memTypeIndex == UINT32_MAX)
4158 memTypeIndex = allocInfo.memoryType;
4159
4160 char* data = nullptr;
4161
4162 if(mode == MODE::PERSISTENTLY_MAPPED)
4163 {
4164 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004165 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004166 }
4167 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4168 mode == MODE::MAP_TWO_TIMES)
4169 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004170 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004171 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004172 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004173
4174 if(mode == MODE::MAP_TWO_TIMES)
4175 {
4176 char* data2 = nullptr;
4177 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004178 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004179 }
4180 }
4181 else if(mode == MODE::DONT_MAP)
4182 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004183 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004184 }
4185 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004186 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004187
4188 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4189 if(data)
4190 data[0xFFFF] = data[0];
4191
4192 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4193 {
4194 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4195
4196 VmaAllocationInfo allocInfo;
4197 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4198 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004199 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004200 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004201 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004202 }
4203
4204 switch(rand.Generate() % 3)
4205 {
4206 case 0: Sleep(0); break; // Yield.
4207 case 1: Sleep(10); break; // 10 ms
4208 // default: No sleep.
4209 }
4210
4211 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4212 if(data)
4213 data[0xFFFF] = data[0];
4214 }
4215
4216 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4217 {
4218 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4219 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4220 {
4221 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4222
4223 VmaAllocationInfo allocInfo;
4224 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004225 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004226 }
4227
4228 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4229 }
4230 });
4231 }
4232
4233 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4234 threads[threadIndex].join();
4235
4236 vmaDestroyPool(g_hAllocator, pool);
4237 }
4238}
4239
// Writes the CSV header row for the main benchmark results.
// Must stay in sync with the row layout produced by WriteMainTestResult.
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const COLUMN_NAMES[] = {
        "Code", "Time",
        "Threads", "Buffers and images", "Sizes", "Operations", "Allocation strategy", "Free order",
        "Total Time (us)",
        "Allocation Time Min (us)",
        "Allocation Time Avg (us)",
        "Allocation Time Max (us)",
        "Deallocation Time Min (us)",
        "Deallocation Time Avg (us)",
        "Deallocation Time Max (us)",
        "Total Memory Allocated (B)",
        "Free Range Size Avg (B)",
        "Free Range Size Max (B)",
    };
    const size_t columnCount = sizeof(COLUMN_NAMES) / sizeof(COLUMN_NAMES[0]);
    for(size_t i = 0; i < columnCount; ++i)
        fprintf(file, "%s%c", COLUMN_NAMES[i], (i + 1 < columnCount) ? ',' : '\n');
}
4256
4257static void WriteMainTestResult(
4258 FILE* file,
4259 const char* codeDescription,
4260 const char* testDescription,
4261 const Config& config, const Result& result)
4262{
4263 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4264 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4265 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4266 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4267 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4268 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4269 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4270
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004271 std::string currTime;
4272 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004273
4274 fprintf(file,
4275 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004276 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4277 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004278 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004279 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004280 totalTimeSeconds * 1e6f,
4281 allocationTimeMinSeconds * 1e6f,
4282 allocationTimeAvgSeconds * 1e6f,
4283 allocationTimeMaxSeconds * 1e6f,
4284 deallocationTimeMinSeconds * 1e6f,
4285 deallocationTimeAvgSeconds * 1e6f,
4286 deallocationTimeMaxSeconds * 1e6f,
4287 result.TotalMemoryAllocated,
4288 result.FreeRangeSizeAvg,
4289 result.FreeRangeSizeMax);
4290}
4291
// Writes the CSV header row for the custom-pool benchmark results.
// Must stay in sync with the row layout produced by WritePoolTestResult.
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const COLUMN_NAMES[] = {
        "Code", "Test", "Time",
        "Config",
        "Total Time (us)",
        "Allocation Time Min (us)",
        "Allocation Time Avg (us)",
        "Allocation Time Max (us)",
        "Deallocation Time Min (us)",
        "Deallocation Time Avg (us)",
        "Deallocation Time Max (us)",
        "Lost Allocation Count",
        "Lost Allocation Total Size (B)",
        "Failed Allocation Count",
        "Failed Allocation Total Size (B)",
    };
    const size_t columnCount = sizeof(COLUMN_NAMES) / sizeof(COLUMN_NAMES[0]);
    for(size_t i = 0; i < columnCount; ++i)
        fprintf(file, "%s%c", COLUMN_NAMES[i], (i + 1 < columnCount) ? ',' : '\n');
}
4309
4310static void WritePoolTestResult(
4311 FILE* file,
4312 const char* codeDescription,
4313 const char* testDescription,
4314 const PoolTestConfig& config,
4315 const PoolTestResult& result)
4316{
4317 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4318 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4319 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4320 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4321 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4322 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4323 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4324
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004325 std::string currTime;
4326 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004327
4328 fprintf(file,
4329 "%s,%s,%s,"
4330 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4331 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4332 // General
4333 codeDescription,
4334 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004335 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004336 // Config
4337 config.ThreadCount,
4338 (unsigned long long)config.PoolSize,
4339 config.FrameCount,
4340 config.TotalItemCount,
4341 config.UsedItemCountMin,
4342 config.UsedItemCountMax,
4343 config.ItemsToMakeUnusedPercent,
4344 // Results
4345 totalTimeSeconds * 1e6f,
4346 allocationTimeMinSeconds * 1e6f,
4347 allocationTimeAvgSeconds * 1e6f,
4348 allocationTimeMaxSeconds * 1e6f,
4349 deallocationTimeMinSeconds * 1e6f,
4350 deallocationTimeAvgSeconds * 1e6f,
4351 deallocationTimeMaxSeconds * 1e6f,
4352 result.LostAllocationCount,
4353 result.LostAllocationTotalSize,
4354 result.FailedAllocationCount,
4355 result.FailedAllocationTotalSize);
4356}
4357
4358static void PerformCustomMainTest(FILE* file)
4359{
4360 Config config{};
4361 config.RandSeed = 65735476;
4362 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4363 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4364 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4365 config.FreeOrder = FREE_ORDER::FORWARD;
4366 config.ThreadCount = 16;
4367 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004368 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004369
4370 // Buffers
4371 //config.AllocationSizes.push_back({4, 16, 1024});
4372 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4373
4374 // Images
4375 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4376 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4377
4378 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4379 config.AdditionalOperationCount = 1024;
4380
4381 Result result{};
4382 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004383 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004384 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4385}
4386
// Runs TestPool_Benchmark once with a single hand-tuned configuration (for
// ad-hoc experiments) and appends the result row to `file` in the CSV format
// of WritePoolTestResultHeader.
static void PerformCustomPoolTest(FILE* file)
{
    PoolTestConfig config;
    // NOTE(review): This initial PoolSize looks like a dead store - it is
    // overwritten from CalcAvgResourceSize() below before the benchmark runs.
    // Confirm CalcAvgResourceSize() does not read PoolSize before removing it.
    config.PoolSize = 100 * 1024 * 1024;
    config.RandSeed = 2345764;
    config.ThreadCount = 1;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    // Size class 1: buffers 1 KB ... 1 MB.
    AllocationSize allocSize = {};
    allocSize.BufferSizeMin = 1024;
    allocSize.BufferSizeMax = 1024 * 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    // Size class 2: images 128 ... 1024 texels per side, equally probable
    // with the buffer class above.
    allocSize.BufferSizeMin = 0;
    allocSize.BufferSizeMax = 0;
    allocSize.ImageSizeMin = 128;
    allocSize.ImageSizeMax = 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    // Pool holds ~200 average-sized resources; the item pool is 10x larger
    // than the maximum concurrently used count.
    config.PoolSize = config.CalcAvgResourceSize() * 200;
    config.UsedItemCountMax = 160;
    config.TotalItemCount = config.UsedItemCountMax * 10;
    config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

    // Memory aliasing is expected during the benchmark; silence the warning
    // for its duration only.
    g_MemoryAliasingWarningEnabled = false;
    PoolTestResult result = {};
    TestPool_Benchmark(result, config);
    g_MemoryAliasingWarningEnabled = true;

    WritePoolTestResult(file, "Code desc", "Test desc", config, result);
}
4421
// Runs MainTest over a full cartesian sweep of configurations (thread counts,
// buffers vs. images, small vs. large sizes, varying vs. constant sizes,
// up-front allocation percentage, allocation strategy) and writes one CSV row
// per run via WriteMainTestResult. The sweep breadth is controlled by the
// compile-time ConfigType. Note: a single `config` object is mutated in place
// across all nested loops, so the assignment order below is significant.
// The desc1..desc6 strings are built with ',' separators on purpose - the
// description spreads across the six configuration columns declared in
// WriteMainTestResultHeader (the ' ' before "Varying_sizes"/"Constant_sizes"
// merges that token into the "Sizes" column).
static void PerformMainTests(FILE* file)
{
    // Repeat each configuration several times only at the maximum test level.
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    // Number of thread-count variants to sweep (indices into the switch below).
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                // A bigger total budget whenever large resources are involved.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the size classes for this combination from scratch.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                // `file` may be null when only console progress is wanted.
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
4686
// Runs TestPool_Benchmark over a cartesian sweep of pool configurations
// (thread counts, buffers vs. images, small vs. large sizes, varying vs.
// constant sizes, pool subscription level) and writes one CSV row per run via
// WritePoolTestResult. Sweep breadth is controlled by the compile-time
// ConfigType. A single `config` object is mutated in place across all nested
// loops, so assignment order is significant. Unlike PerformMainTests, the
// description parts are joined with spaces - they all land in the single
// "Config" column of WritePoolTestResultHeader.
static void PerformPoolTests(FILE* file)
{
    // Target number of average-sized resources a pool should hold; also the
    // base for the subscription percentages below.
    const size_t AVG_RESOURCES_PER_POOL = 300;

    // Repeat each configuration several times only at the maximum test level.
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                // Provisional pool size; overwritten below once the average
                // resource size for this combination is known.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the size classes for this combination from scratch.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Final pool size: enough for AVG_RESOURCES_PER_POOL
                    // average resources of the classes chosen above.
                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    // NOTE(review): left uninitialized and only guarded by
                    // assert(0) in the default branch - would be UB in a
                    // release build if ConfigType were ever out of range.
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            // Aliasing warnings are expected during the
                            // benchmark; silence them for its duration only.
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}
4912
Adam Sawickia83793a2018-09-03 13:40:42 +02004913static void BasicTestBuddyAllocator()
4914{
4915 wprintf(L"Basic test buddy allocator\n");
4916
4917 RandomNumberGenerator rand{76543};
4918
4919 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4920 sampleBufCreateInfo.size = 1024; // Whatever.
4921 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4922
4923 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4924 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4925
4926 VmaPoolCreateInfo poolCreateInfo = {};
4927 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004928 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004929
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004930 // Deliberately adding 1023 to test usable size smaller than memory block size.
4931 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004932 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004933 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004934
4935 VmaPool pool = nullptr;
4936 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004937 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004938
4939 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4940
4941 VmaAllocationCreateInfo allocCreateInfo = {};
4942 allocCreateInfo.pool = pool;
4943
4944 std::vector<BufferInfo> bufInfo;
4945 BufferInfo newBufInfo;
4946 VmaAllocationInfo allocInfo;
4947
4948 bufCreateInfo.size = 1024 * 256;
4949 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4950 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004951 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004952 bufInfo.push_back(newBufInfo);
4953
4954 bufCreateInfo.size = 1024 * 512;
4955 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4956 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004957 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004958 bufInfo.push_back(newBufInfo);
4959
4960 bufCreateInfo.size = 1024 * 128;
4961 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4962 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004963 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004964 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004965
4966 // Test very small allocation, smaller than minimum node size.
4967 bufCreateInfo.size = 1;
4968 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4969 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004970 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004971 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004972
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004973 // Test some small allocation with alignment requirement.
4974 {
4975 VkMemoryRequirements memReq;
4976 memReq.alignment = 256;
4977 memReq.memoryTypeBits = UINT32_MAX;
4978 memReq.size = 32;
4979
4980 newBufInfo.Buffer = VK_NULL_HANDLE;
4981 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4982 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004983 TEST(res == VK_SUCCESS);
4984 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004985 bufInfo.push_back(newBufInfo);
4986 }
4987
4988 //SaveAllocatorStatsToFile(L"TEST.json");
4989
Adam Sawicki21017c62018-09-07 15:26:59 +02004990 VmaPoolStats stats = {};
4991 vmaGetPoolStats(g_hAllocator, pool, &stats);
4992 int DBG = 0; // Set breakpoint here to inspect `stats`.
4993
Adam Sawicki80927152018-09-07 17:27:23 +02004994 // Allocate enough new buffers to surely fall into second block.
4995 for(uint32_t i = 0; i < 32; ++i)
4996 {
4997 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4998 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4999 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005000 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005001 bufInfo.push_back(newBufInfo);
5002 }
5003
5004 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5005
Adam Sawickia83793a2018-09-03 13:40:42 +02005006 // Destroy the buffers in random order.
5007 while(!bufInfo.empty())
5008 {
5009 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5010 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5011 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5012 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5013 }
5014
5015 vmaDestroyPool(g_hAllocator, pool);
5016}
5017
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005018static void BasicTestAllocatePages()
5019{
5020 wprintf(L"Basic test allocate pages\n");
5021
5022 RandomNumberGenerator rand{765461};
5023
5024 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5025 sampleBufCreateInfo.size = 1024; // Whatever.
5026 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5027
5028 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5029 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5030
5031 VmaPoolCreateInfo poolCreateInfo = {};
5032 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005033 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005034
5035 // 1 block of 1 MB.
5036 poolCreateInfo.blockSize = 1024 * 1024;
5037 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5038
5039 // Create pool.
5040 VmaPool pool = nullptr;
5041 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005042 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005043
5044 // Make 100 allocations of 4 KB - they should fit into the pool.
5045 VkMemoryRequirements memReq;
5046 memReq.memoryTypeBits = UINT32_MAX;
5047 memReq.alignment = 4 * 1024;
5048 memReq.size = 4 * 1024;
5049
5050 VmaAllocationCreateInfo allocCreateInfo = {};
5051 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5052 allocCreateInfo.pool = pool;
5053
5054 constexpr uint32_t allocCount = 100;
5055
5056 std::vector<VmaAllocation> alloc{allocCount};
5057 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5058 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005059 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005060 for(uint32_t i = 0; i < allocCount; ++i)
5061 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005062 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005063 allocInfo[i].pMappedData != nullptr &&
5064 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5065 allocInfo[i].memoryType == allocInfo[0].memoryType);
5066 }
5067
5068 // Free the allocations.
5069 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5070 std::fill(alloc.begin(), alloc.end(), nullptr);
5071 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5072
5073 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5074 // Also test optional allocationInfo = null.
5075 memReq.size = 100 * 1024;
5076 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005077 TEST(res != VK_SUCCESS);
5078 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005079
5080 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5081 memReq.size = 4 * 1024;
5082 memReq.alignment = 128 * 1024;
5083 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005084 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005085
5086 // Make 100 dedicated allocations of 4 KB.
5087 memReq.alignment = 4 * 1024;
5088 memReq.size = 4 * 1024;
5089
5090 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5091 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5092 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5093 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005094 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005095 for(uint32_t i = 0; i < allocCount; ++i)
5096 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005097 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005098 allocInfo[i].pMappedData != nullptr &&
5099 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5100 allocInfo[i].offset == 0);
5101 if(i > 0)
5102 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005103 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005104 }
5105 }
5106
5107 // Free the allocations.
5108 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5109 std::fill(alloc.begin(), alloc.end(), nullptr);
5110 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5111
5112 vmaDestroyPool(g_hAllocator, pool);
5113}
5114
Adam Sawickif2975342018-10-16 13:49:02 +02005115// Test the testing environment.
5116static void TestGpuData()
5117{
5118 RandomNumberGenerator rand = { 53434 };
5119
5120 std::vector<AllocInfo> allocInfo;
5121
5122 for(size_t i = 0; i < 100; ++i)
5123 {
5124 AllocInfo info = {};
5125
5126 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5127 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5128 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5129 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5130 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5131
5132 VmaAllocationCreateInfo allocCreateInfo = {};
5133 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5134
5135 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5136 TEST(res == VK_SUCCESS);
5137
5138 info.m_StartValue = rand.Generate();
5139
5140 allocInfo.push_back(std::move(info));
5141 }
5142
5143 UploadGpuData(allocInfo.data(), allocInfo.size());
5144
5145 ValidateGpuData(allocInfo.data(), allocInfo.size());
5146
5147 DestroyAllAllocations(allocInfo);
5148}
5149
Adam Sawickib8333fb2018-03-13 16:15:53 +01005150void Test()
5151{
5152 wprintf(L"TESTING:\n");
5153
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005154 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005155 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005156 ////////////////////////////////////////////////////////////////////////////////
5157 // Temporarily insert custom tests here:
Adam Sawicki80927152018-09-07 17:27:23 +02005158
Adam Sawicki70a683e2018-08-24 15:36:32 +02005159 return;
5160 }
5161
Adam Sawickib8333fb2018-03-13 16:15:53 +01005162 // # Simple tests
5163
5164 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005165 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005166#if VMA_DEBUG_MARGIN
5167 TestDebugMargin();
5168#else
5169 TestPool_SameSize();
5170 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005171 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005172#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005173#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5174 TestAllocationsInitialization();
5175#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005176 TestMapping();
5177 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005178 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005179 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005180 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005181
Adam Sawicki4338f662018-09-07 14:12:37 +02005182 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005183 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005184
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005185 {
5186 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005187 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005188 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005189 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005190 fclose(file);
5191 }
5192
Adam Sawickib8333fb2018-03-13 16:15:53 +01005193 TestDefragmentationSimple();
5194 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005195 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005196 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005197
5198 // # Detailed tests
5199 FILE* file;
5200 fopen_s(&file, "Results.csv", "w");
5201 assert(file != NULL);
5202
5203 WriteMainTestResultHeader(file);
5204 PerformMainTests(file);
5205 //PerformCustomMainTest(file);
5206
5207 WritePoolTestResultHeader(file);
5208 PerformPoolTests(file);
5209 //PerformCustomPoolTest(file);
5210
5211 fclose(file);
5212
5213 wprintf(L"Done.\n");
5214}
5215
Adam Sawickif1a793c2018-03-13 15:42:22 +01005216#endif // #ifdef _WIN32