#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

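// Multithreaded stress test. Each thread allocates buffers and images up to its share of
// config.BeginBytesToAllocate, then performs AdditionalOperationCount random allocations and frees
// (some of them on the shared commonAllocations list), and finally releases everything in the order
// given by config.FreeOrder. Timing and memory statistics are accumulated in outResult.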
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads to reach max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

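// Collection of host-visible, persistently mapped staging buffers that are reused between
// transfers. Total size of all buffers, used and unused, is capped at MAX_TOTAL_SIZE.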
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // Any remaining unused buffers are too small: free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

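// Fills each destination buffer with a deterministic pattern derived from its m_StartValue,
// writing through staging buffers and recording vkCmdCopyBuffer into the single-time command buffer.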
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

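// Reads every buffer back into staging memory and checks that it still contains the pattern
// written by UploadGpuData. The command buffer is flushed whenever the staging collection runs out of space.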
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

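// Thin wrapper over vmaDefragment(). Allocations reported as changed get their buffer or image
// destroyed, recreated, and bound again at the new memory location.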
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
    */
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

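// Runs the same defragmentation twice: once addressed by pool (pPools) and once by an explicit
// allocation list (pAllocations), then checks that both runs report identical statistics.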
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

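// GPU-side defragmentation test: creates GPU-only buffers, frees a percentage of them, uploads
// a known pattern, defragments the movable ones through a command buffer, and validates the data afterwards.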
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001584 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1585 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001586 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001587 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1588 const size_t percentToLeave = 30;
1589 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001590 RandomNumberGenerator rand = { 234522 };
1591
1592 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001593
1594 VmaAllocationCreateInfo allocCreateInfo = {};
1595 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001596 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001597
1598 // Create all intended buffers.
1599 for(size_t i = 0; i < bufCount; ++i)
1600 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001601 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1602
1603 if(rand.Generate() % 100 < percentNonMovable)
1604 {
1605 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1606 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1607 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1608 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1609 }
1610 else
1611 {
1612 // Different usage just to see different color in output from VmaDumpVis.
1613 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1614 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1615 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1616 // And in JSON dump.
1617 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1618 }
1619
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001620 AllocInfo alloc;
1621 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1622 alloc.m_StartValue = rand.Generate();
1623 allocations.push_back(alloc);
1624 }
1625
1626 // Destroy some percentage of them.
1627 {
1628 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1629 for(size_t i = 0; i < buffersToDestroy; ++i)
1630 {
1631 const size_t index = rand.Generate() % allocations.size();
1632 allocations[index].Destroy();
1633 allocations.erase(allocations.begin() + index);
1634 }
1635 }
1636
1637 // Fill them with meaningful data.
1638 UploadGpuData(allocations.data(), allocations.size());
1639
Adam Sawickic6ede152018-11-16 17:04:14 +01001640 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001641 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001642 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001643
1644 // Defragment using GPU only.
1645 {
1646 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001647
Adam Sawickic6ede152018-11-16 17:04:14 +01001648 std::vector<VmaAllocation> allocationPtrs;
1649 std::vector<VkBool32> allocationChanged;
1650 std::vector<size_t> allocationOriginalIndex;
1651
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001652 for(size_t i = 0; i < allocCount; ++i)
1653 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001654 VmaAllocationInfo allocInfo = {};
1655 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1656 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1657 {
1658 allocationPtrs.push_back(allocations[i].m_Allocation);
1659 allocationChanged.push_back(VK_FALSE);
1660 allocationOriginalIndex.push_back(i);
1661 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001662 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001663
1664 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001665
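        // vmaDefragmentationBegin() records the necessary transfer commands into the provided command
        // buffer. That command buffer has to be submitted and finish execution before
        // vmaDefragmentationEnd() is called, which is what the Begin/EndSingleTimeCommands helpers
        // used here are expected to take care of (begin recording, submit, and wait).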
1666 BeginSingleTimeCommands();
1667
1668 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001669 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001670 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001671 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001672 defragInfo.pAllocationsChanged = allocationChanged.data();
1673 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001674 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1675 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1676
1677 VmaDefragmentationStats stats = {};
1678 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1679 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1680 TEST(res >= VK_SUCCESS);
1681
1682 EndSingleTimeCommands();
1683
1684 vmaDefragmentationEnd(g_hAllocator, ctx);
1685
Adam Sawickic6ede152018-11-16 17:04:14 +01001686 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001687 {
1688 if(allocationChanged[i])
1689 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001690 const size_t origAllocIndex = allocationOriginalIndex[i];
1691 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001692 }
1693 }
1694
Adam Sawicki440307e2018-10-18 15:05:19 +02001695 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1696 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001697 }
1698
1699 ValidateGpuData(allocations.data(), allocations.size());
1700
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001701 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001702 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001703
1704 // Destroy all remaining buffers.
1705 for(size_t i = allocations.size(); i--; )
1706 {
1707 allocations[i].Destroy();
1708 }
Adam Sawicki05704002018-11-08 16:07:29 +01001709
1710 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001711}
1712
Adam Sawickib8333fb2018-03-13 16:15:53 +01001713static void TestUserData()
1714{
1715 VkResult res;
1716
1717 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1718 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1719 bufCreateInfo.size = 0x10000;
1720
1721 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1722 {
1723 // Opaque pointer
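        // pUserData is stored by VMA as an opaque value: it is returned unchanged in
        // VmaAllocationInfo::pUserData and can be replaced later with vmaSetAllocationUserData().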
1724 {
1725
1726 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1727 void* pointerToSomething = &res;
1728
1729 VmaAllocationCreateInfo allocCreateInfo = {};
1730 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1731 allocCreateInfo.pUserData = numberAsPointer;
1732 if(testIndex == 1)
1733 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1734
1735 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1736 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001737 TEST(res == VK_SUCCESS);
 1738 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001739
1740 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001741 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001742
1743 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1744 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001745 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001746
1747 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1748 }
1749
1750 // String
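        // With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, pUserData is treated as a
        // null-terminated string and the allocator keeps its own copy, so the caller's buffer can be
        // freed right after creation; this test verifies that by deleting name1Buf below.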
1751 {
1752 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1753 const char* name2 = "2";
1754 const size_t name1Len = strlen(name1);
1755
1756 char* name1Buf = new char[name1Len + 1];
1757 strcpy_s(name1Buf, name1Len + 1, name1);
1758
1759 VmaAllocationCreateInfo allocCreateInfo = {};
1760 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1761 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1762 allocCreateInfo.pUserData = name1Buf;
1763 if(testIndex == 1)
1764 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1765
1766 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1767 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001768 TEST(res == VK_SUCCESS);
1769 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1770 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001771
1772 delete[] name1Buf;
1773
1774 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001775 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001776
1777 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1778 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001779 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1782 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001783 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001784
1785 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1786 }
1787 }
1788}
1789
Adam Sawicki370ab182018-11-08 16:31:00 +01001790static void TestInvalidAllocations()
1791{
1792 VkResult res;
1793
1794 VmaAllocationCreateInfo allocCreateInfo = {};
1795 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1796
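    // All of the calls below use intentionally invalid parameters. They are expected to fail
    // gracefully with VK_ERROR_VALIDATION_FAILED_EXT rather than crash or corrupt state.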
1797 // Try to allocate 0 bytes.
1798 {
1799 VkMemoryRequirements memReq = {};
1800 memReq.size = 0; // !!!
1801 memReq.alignment = 4;
1802 memReq.memoryTypeBits = UINT32_MAX;
1803 VmaAllocation alloc = VK_NULL_HANDLE;
1804 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1805 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1806 }
1807
1808 // Try to create buffer with size = 0.
1809 {
1810 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1811 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1812 bufCreateInfo.size = 0; // !!!
1813 VkBuffer buf = VK_NULL_HANDLE;
1814 VmaAllocation alloc = VK_NULL_HANDLE;
1815 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1816 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1817 }
1818
1819 // Try to create image with one dimension = 0.
1820 {
 1821 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1822 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1823 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1824 imageCreateInfo.extent.width = 128;
1825 imageCreateInfo.extent.height = 0; // !!!
1826 imageCreateInfo.extent.depth = 1;
1827 imageCreateInfo.mipLevels = 1;
1828 imageCreateInfo.arrayLayers = 1;
1829 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1830 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1831 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1832 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1833 VkImage image = VK_NULL_HANDLE;
1834 VmaAllocation alloc = VK_NULL_HANDLE;
1835 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1836 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1837 }
1838}
1839
Adam Sawickib8333fb2018-03-13 16:15:53 +01001840static void TestMemoryRequirements()
1841{
1842 VkResult res;
1843 VkBuffer buf;
1844 VmaAllocation alloc;
1845 VmaAllocationInfo allocInfo;
1846
1847 const VkPhysicalDeviceMemoryProperties* memProps;
1848 vmaGetMemoryProperties(g_hAllocator, &memProps);
1849
1850 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1851 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1852 bufInfo.size = 128;
1853
1854 VmaAllocationCreateInfo allocCreateInfo = {};
1855
1856 // No requirements.
1857 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001858 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001859 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1860
1861 // Usage.
1862 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1863 allocCreateInfo.requiredFlags = 0;
1864 allocCreateInfo.preferredFlags = 0;
1865 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1866
1867 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001868 TEST(res == VK_SUCCESS);
1869 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001870 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1871
1872 // Required flags, preferred flags.
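    // requiredFlags must all be present in the chosen memory type, while preferredFlags only bias
    // the choice and may not be satisfied, so only the required ones are asserted below.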
1873 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1874 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1875 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1876 allocCreateInfo.memoryTypeBits = 0;
1877
1878 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001879 TEST(res == VK_SUCCESS);
1880 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1881 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001882 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1883
1884 // memoryTypeBits.
1885 const uint32_t memType = allocInfo.memoryType;
1886 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1887 allocCreateInfo.requiredFlags = 0;
1888 allocCreateInfo.preferredFlags = 0;
1889 allocCreateInfo.memoryTypeBits = 1u << memType;
1890
1891 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001892 TEST(res == VK_SUCCESS);
1893 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001894 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1895
1896}
1897
1898static void TestBasics()
1899{
1900 VkResult res;
1901
1902 TestMemoryRequirements();
1903
1904 // Lost allocation
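    // vmaCreateLostAllocation() returns an allocation that is already in the lost state:
    // it has no device memory bound and reports size == 0, as asserted below.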
1905 {
1906 VmaAllocation alloc = VK_NULL_HANDLE;
1907 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001908 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001909
1910 VmaAllocationInfo allocInfo;
1911 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001912 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1913 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001914
1915 vmaFreeMemory(g_hAllocator, alloc);
1916 }
1917
1918 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1919 {
1920 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1921 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1922 bufCreateInfo.size = 128;
1923
1924 VmaAllocationCreateInfo allocCreateInfo = {};
1925 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1926 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1927
1928 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1929 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001930 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001931
1932 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1933
 1934 // Same with DEDICATED_MEMORY.
1935 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1936
1937 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001938 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001939
1940 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1941 }
1942
1943 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001944
1945 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001946}
1947
1948void TestHeapSizeLimit()
1949{
1950 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1951 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
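    // pHeapSizeLimit makes the allocator act as if every heap had at most HEAP_SIZE_LIMIT bytes.
    // The test fills that artificial limit completely and then expects the next allocation to fail
    // with VK_ERROR_OUT_OF_DEVICE_MEMORY.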
1952
1953 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1954 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1955 {
1956 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1957 }
1958
1959 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1960 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1961 allocatorCreateInfo.device = g_hDevice;
1962 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1963
1964 VmaAllocator hAllocator;
1965 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001966 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001967
1968 struct Item
1969 {
1970 VkBuffer hBuf;
1971 VmaAllocation hAlloc;
1972 };
1973 std::vector<Item> items;
1974
1975 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1976 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1977
 1978 // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
1979 VmaAllocationInfo ownAllocInfo;
1980 {
1981 VmaAllocationCreateInfo allocCreateInfo = {};
1982 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1983 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1984
1985 bufCreateInfo.size = BLOCK_SIZE / 2;
1986
1987 for(size_t i = 0; i < 2; ++i)
1988 {
1989 Item item;
1990 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001991 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001992 items.push_back(item);
1993 }
1994 }
1995
1996 // Create pool to make sure allocations must be out of this memory type.
1997 VmaPoolCreateInfo poolCreateInfo = {};
1998 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1999 poolCreateInfo.blockSize = BLOCK_SIZE;
2000
2001 VmaPool hPool;
2002 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002003 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002004
2005 // 2. Allocate normal buffers from all the remaining memory.
2006 {
2007 VmaAllocationCreateInfo allocCreateInfo = {};
2008 allocCreateInfo.pool = hPool;
2009
2010 bufCreateInfo.size = BLOCK_SIZE / 2;
2011
2012 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2013 for(size_t i = 0; i < bufCount; ++i)
2014 {
2015 Item item;
2016 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002017 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002018 items.push_back(item);
2019 }
2020 }
2021
2022 // 3. Allocation of one more (even small) buffer should fail.
2023 {
2024 VmaAllocationCreateInfo allocCreateInfo = {};
2025 allocCreateInfo.pool = hPool;
2026
2027 bufCreateInfo.size = 128;
2028
2029 VkBuffer hBuf;
2030 VmaAllocation hAlloc;
2031 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002032 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002033 }
2034
2035 // Destroy everything.
2036 for(size_t i = items.size(); i--; )
2037 {
2038 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2039 }
2040
2041 vmaDestroyPool(hAllocator, hPool);
2042
2043 vmaDestroyAllocator(hAllocator);
2044}
2045
Adam Sawicki212a4a62018-06-14 15:44:45 +02002046#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002047static void TestDebugMargin()
2048{
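    // When VMA_DEBUG_MARGIN is nonzero, the allocator reserves that many bytes around every
    // allocation. This test checks that offsets respect the margin and, assuming corruption
    // detection (VMA_DEBUG_DETECT_CORRUPTION) is enabled in this build, that vmaCheckCorruption()
    // still reports VK_SUCCESS for untouched margins.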
2049 if(VMA_DEBUG_MARGIN == 0)
2050 {
2051 return;
2052 }
2053
2054 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002055 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002056
2057 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002058 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002059
 2060 // Create a few buffers of different sizes.
2061 const size_t BUF_COUNT = 10;
2062 BufferInfo buffers[BUF_COUNT];
2063 VmaAllocationInfo allocInfo[BUF_COUNT];
 2064 for(size_t i = 0; i < BUF_COUNT; ++i)
2065 {
2066 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002067 // Last one will be mapped.
2068 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002069
2070 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002071 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002072 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002073 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002074
2075 if(i == BUF_COUNT - 1)
2076 {
2077 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002078 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002079 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2080 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2081 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002082 }
2083
2084 // Check if their offsets preserve margin between them.
2085 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2086 {
2087 if(lhs.deviceMemory != rhs.deviceMemory)
2088 {
2089 return lhs.deviceMemory < rhs.deviceMemory;
2090 }
2091 return lhs.offset < rhs.offset;
2092 });
2093 for(size_t i = 1; i < BUF_COUNT; ++i)
2094 {
2095 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2096 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002097 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002098 }
2099 }
2100
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002102 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002103
Adam Sawicki73b16652018-06-11 16:39:25 +02002104 // Destroy all buffers.
2105 for(size_t i = BUF_COUNT; i--; )
2106 {
2107 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2108 }
2109}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002110#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002111
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002112static void TestLinearAllocator()
2113{
2114 wprintf(L"Test linear allocator\n");
2115
2116 RandomNumberGenerator rand{645332};
2117
2118 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2119 sampleBufCreateInfo.size = 1024; // Whatever.
2120 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2121
2122 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2123 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2124
2125 VmaPoolCreateInfo poolCreateInfo = {};
2126 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002127 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002128
Adam Sawickiee082772018-06-20 17:45:49 +02002129 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002130 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2131 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2132
2133 VmaPool pool = nullptr;
2134 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002135 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002136
2137 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2138
2139 VmaAllocationCreateInfo allocCreateInfo = {};
2140 allocCreateInfo.pool = pool;
2141
2142 constexpr size_t maxBufCount = 100;
2143 std::vector<BufferInfo> bufInfo;
2144
2145 constexpr VkDeviceSize bufSizeMin = 16;
2146 constexpr VkDeviceSize bufSizeMax = 1024;
2147 VmaAllocationInfo allocInfo;
2148 VkDeviceSize prevOffset = 0;
2149
2150 // Test one-time free.
2151 for(size_t i = 0; i < 2; ++i)
2152 {
 2153 // Allocate a number of buffers of varying size that surely fit into this block.
2154 VkDeviceSize bufSumSize = 0;
2155 for(size_t i = 0; i < maxBufCount; ++i)
2156 {
2157 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2158 BufferInfo newBufInfo;
2159 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2160 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002161 TEST(res == VK_SUCCESS);
2162 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002163 bufInfo.push_back(newBufInfo);
2164 prevOffset = allocInfo.offset;
2165 bufSumSize += bufCreateInfo.size;
2166 }
2167
2168 // Validate pool stats.
2169 VmaPoolStats stats;
2170 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002171 TEST(stats.size == poolCreateInfo.blockSize);
2172 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2173 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002174
2175 // Destroy the buffers in random order.
2176 while(!bufInfo.empty())
2177 {
2178 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2179 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2180 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2181 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2182 }
2183 }
2184
2185 // Test stack.
2186 {
 2187 // Allocate a number of buffers of varying size that surely fit into this block.
2188 for(size_t i = 0; i < maxBufCount; ++i)
2189 {
2190 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2191 BufferInfo newBufInfo;
2192 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2193 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002194 TEST(res == VK_SUCCESS);
2195 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002196 bufInfo.push_back(newBufInfo);
2197 prevOffset = allocInfo.offset;
2198 }
2199
 2200 // Destroy a few buffers from the top of the stack.
2201 for(size_t i = 0; i < maxBufCount / 5; ++i)
2202 {
2203 const BufferInfo& currBufInfo = bufInfo.back();
2204 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2205 bufInfo.pop_back();
2206 }
2207
2208 // Create some more
2209 for(size_t i = 0; i < maxBufCount / 5; ++i)
2210 {
2211 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2212 BufferInfo newBufInfo;
2213 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2214 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002215 TEST(res == VK_SUCCESS);
2216 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002217 bufInfo.push_back(newBufInfo);
2218 prevOffset = allocInfo.offset;
2219 }
2220
2221 // Destroy the buffers in reverse order.
2222 while(!bufInfo.empty())
2223 {
2224 const BufferInfo& currBufInfo = bufInfo.back();
2225 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2226 bufInfo.pop_back();
2227 }
2228 }
2229
Adam Sawickiee082772018-06-20 17:45:49 +02002230 // Test ring buffer.
2231 {
 2232 // Allocate a number of buffers that surely fit into this block.
2233 bufCreateInfo.size = bufSizeMax;
2234 for(size_t i = 0; i < maxBufCount; ++i)
2235 {
2236 BufferInfo newBufInfo;
2237 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2238 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002239 TEST(res == VK_SUCCESS);
2240 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002241 bufInfo.push_back(newBufInfo);
2242 prevOffset = allocInfo.offset;
2243 }
2244
 2245 // Free and allocate new buffers so many times that we are sure to wrap around at least once.
2246 const size_t buffersPerIter = maxBufCount / 10 - 1;
2247 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2248 for(size_t iter = 0; iter < iterCount; ++iter)
2249 {
2250 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2251 {
2252 const BufferInfo& currBufInfo = bufInfo.front();
2253 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2254 bufInfo.erase(bufInfo.begin());
2255 }
2256 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2257 {
2258 BufferInfo newBufInfo;
2259 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2260 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002261 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002262 bufInfo.push_back(newBufInfo);
2263 }
2264 }
2265
2266 // Allocate buffers until we reach out-of-memory.
2267 uint32_t debugIndex = 0;
2268 while(res == VK_SUCCESS)
2269 {
2270 BufferInfo newBufInfo;
2271 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2272 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2273 if(res == VK_SUCCESS)
2274 {
2275 bufInfo.push_back(newBufInfo);
2276 }
2277 else
2278 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002279 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002280 }
2281 ++debugIndex;
2282 }
2283
2284 // Destroy the buffers in random order.
2285 while(!bufInfo.empty())
2286 {
2287 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2288 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2289 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2290 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2291 }
2292 }
2293
Adam Sawicki680b2252018-08-22 14:47:32 +02002294 // Test double stack.
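    // With a linear pool, VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT allocates from the end of the
    // block growing downwards, while regular allocations grow upwards from the beginning, forming a
    // double stack. The two ends must never cross, which the offset checks below verify.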
2295 {
 2296 // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2297 VkDeviceSize prevOffsetLower = 0;
2298 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2299 for(size_t i = 0; i < maxBufCount; ++i)
2300 {
2301 const bool upperAddress = (i % 2) != 0;
2302 if(upperAddress)
2303 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2304 else
2305 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2306 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2307 BufferInfo newBufInfo;
2308 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2309 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002310 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002311 if(upperAddress)
2312 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002313 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002314 prevOffsetUpper = allocInfo.offset;
2315 }
2316 else
2317 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002318 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002319 prevOffsetLower = allocInfo.offset;
2320 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002321 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002322 bufInfo.push_back(newBufInfo);
2323 }
2324
 2325 // Destroy a few buffers from the top of the stack.
2326 for(size_t i = 0; i < maxBufCount / 5; ++i)
2327 {
2328 const BufferInfo& currBufInfo = bufInfo.back();
2329 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2330 bufInfo.pop_back();
2331 }
2332
2333 // Create some more
2334 for(size_t i = 0; i < maxBufCount / 5; ++i)
2335 {
2336 const bool upperAddress = (i % 2) != 0;
2337 if(upperAddress)
2338 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2339 else
2340 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2341 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2342 BufferInfo newBufInfo;
2343 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2344 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002345 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002346 bufInfo.push_back(newBufInfo);
2347 }
2348
2349 // Destroy the buffers in reverse order.
2350 while(!bufInfo.empty())
2351 {
2352 const BufferInfo& currBufInfo = bufInfo.back();
2353 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2354 bufInfo.pop_back();
2355 }
2356
2357 // Create buffers on both sides until we reach out of memory.
2358 prevOffsetLower = 0;
2359 prevOffsetUpper = poolCreateInfo.blockSize;
2360 res = VK_SUCCESS;
2361 for(size_t i = 0; res == VK_SUCCESS; ++i)
2362 {
2363 const bool upperAddress = (i % 2) != 0;
2364 if(upperAddress)
2365 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2366 else
2367 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2368 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2369 BufferInfo newBufInfo;
2370 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2371 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2372 if(res == VK_SUCCESS)
2373 {
2374 if(upperAddress)
2375 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002376 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002377 prevOffsetUpper = allocInfo.offset;
2378 }
2379 else
2380 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002381 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002382 prevOffsetLower = allocInfo.offset;
2383 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002384 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002385 bufInfo.push_back(newBufInfo);
2386 }
2387 }
2388
2389 // Destroy the buffers in random order.
2390 while(!bufInfo.empty())
2391 {
2392 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2393 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2394 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2395 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2396 }
2397
2398 // Create buffers on upper side only, constant size, until we reach out of memory.
2399 prevOffsetUpper = poolCreateInfo.blockSize;
2400 res = VK_SUCCESS;
2401 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2402 bufCreateInfo.size = bufSizeMax;
2403 for(size_t i = 0; res == VK_SUCCESS; ++i)
2404 {
2405 BufferInfo newBufInfo;
2406 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2407 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2408 if(res == VK_SUCCESS)
2409 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002410 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002411 prevOffsetUpper = allocInfo.offset;
2412 bufInfo.push_back(newBufInfo);
2413 }
2414 }
2415
2416 // Destroy the buffers in reverse order.
2417 while(!bufInfo.empty())
2418 {
2419 const BufferInfo& currBufInfo = bufInfo.back();
2420 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2421 bufInfo.pop_back();
2422 }
2423 }
2424
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002425 // Test ring buffer with lost allocations.
2426 {
 2427 // Allocate a number of buffers until the pool is full.
2428 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
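        // An allocation created with CAN_BECOME_LOST may have its memory taken over by a later
        // allocation that uses CAN_MAKE_OTHER_LOST, provided it was not used in the current frame
        // (as tracked via vmaSetCurrentFrameIndex and the pool's frameInUseCount). A lost allocation
        // then reports deviceMemory == VK_NULL_HANDLE.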
2429 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2430 res = VK_SUCCESS;
2431 for(size_t i = 0; res == VK_SUCCESS; ++i)
2432 {
2433 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2434
2435 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2436
2437 BufferInfo newBufInfo;
2438 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2439 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2440 if(res == VK_SUCCESS)
2441 bufInfo.push_back(newBufInfo);
2442 }
2443
2444 // Free first half of it.
2445 {
2446 const size_t buffersToDelete = bufInfo.size() / 2;
2447 for(size_t i = 0; i < buffersToDelete; ++i)
2448 {
2449 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2450 }
2451 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2452 }
2453
 2454 // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002455 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002456 res = VK_SUCCESS;
2457 for(size_t i = 0; res == VK_SUCCESS; ++i)
2458 {
2459 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2460
2461 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2462
2463 BufferInfo newBufInfo;
2464 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2465 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2466 if(res == VK_SUCCESS)
2467 bufInfo.push_back(newBufInfo);
2468 }
2469
2470 VkDeviceSize firstNewOffset;
2471 {
2472 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2473
2474 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2475 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2476 bufCreateInfo.size = bufSizeMax;
2477
2478 BufferInfo newBufInfo;
2479 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2480 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002481 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002482 bufInfo.push_back(newBufInfo);
2483 firstNewOffset = allocInfo.offset;
2484
2485 // Make sure at least one buffer from the beginning became lost.
2486 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002487 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002488 }
2489
 2490 // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around.
2491 size_t newCount = 1;
2492 for(;;)
2493 {
2494 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2495
2496 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2497
2498 BufferInfo newBufInfo;
2499 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2500 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002501 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002502 bufInfo.push_back(newBufInfo);
2503 ++newCount;
2504 if(allocInfo.offset < firstNewOffset)
2505 break;
2506 }
2507
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002508 // Delete buffers that are lost.
2509 for(size_t i = bufInfo.size(); i--; )
2510 {
2511 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2512 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2513 {
2514 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2515 bufInfo.erase(bufInfo.begin() + i);
2516 }
2517 }
2518
2519 // Test vmaMakePoolAllocationsLost
2520 {
2521 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2522
2523 size_t lostAllocCount = SIZE_MAX;
2524 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002525 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002526
2527 size_t realLostAllocCount = 0;
2528 for(size_t i = 0; i < bufInfo.size(); ++i)
2529 {
2530 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2531 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2532 ++realLostAllocCount;
2533 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002534 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002535 }
2536
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002537 // Destroy all the buffers in forward order.
2538 for(size_t i = 0; i < bufInfo.size(); ++i)
2539 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2540 bufInfo.clear();
2541 }
2542
Adam Sawicki70a683e2018-08-24 15:36:32 +02002543 vmaDestroyPool(g_hAllocator, pool);
2544}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002545
Adam Sawicki70a683e2018-08-24 15:36:32 +02002546static void TestLinearAllocatorMultiBlock()
2547{
2548 wprintf(L"Test linear allocator multi block\n");
2549
2550 RandomNumberGenerator rand{345673};
2551
2552 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2553 sampleBufCreateInfo.size = 1024 * 1024;
2554 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2555
2556 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2557 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2558
2559 VmaPoolCreateInfo poolCreateInfo = {};
2560 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2561 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002562 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002563
2564 VmaPool pool = nullptr;
2565 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002566 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002567
2568 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2569
2570 VmaAllocationCreateInfo allocCreateInfo = {};
2571 allocCreateInfo.pool = pool;
2572
2573 std::vector<BufferInfo> bufInfo;
2574 VmaAllocationInfo allocInfo;
2575
2576 // Test one-time free.
2577 {
2578 // Allocate buffers until we move to a second block.
2579 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2580 for(uint32_t i = 0; ; ++i)
2581 {
2582 BufferInfo newBufInfo;
2583 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2584 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002585 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002586 bufInfo.push_back(newBufInfo);
2587 if(lastMem && allocInfo.deviceMemory != lastMem)
2588 {
2589 break;
2590 }
2591 lastMem = allocInfo.deviceMemory;
2592 }
2593
Adam Sawickib8d34d52018-10-03 17:41:20 +02002594 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002595
 2596 // Make sure that the pool now has two blocks.
2597 VmaPoolStats poolStats = {};
2598 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002600
2601 // Destroy all the buffers in random order.
2602 while(!bufInfo.empty())
2603 {
2604 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2605 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2606 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2607 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2608 }
2609
 2610 // Make sure that the pool now has at most one block.
2611 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002612 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002613 }
2614
2615 // Test stack.
2616 {
2617 // Allocate buffers until we move to a second block.
2618 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2619 for(uint32_t i = 0; ; ++i)
2620 {
2621 BufferInfo newBufInfo;
2622 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2623 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002624 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002625 bufInfo.push_back(newBufInfo);
2626 if(lastMem && allocInfo.deviceMemory != lastMem)
2627 {
2628 break;
2629 }
2630 lastMem = allocInfo.deviceMemory;
2631 }
2632
Adam Sawickib8d34d52018-10-03 17:41:20 +02002633 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002634
 2635 // Add a few more buffers.
2636 for(uint32_t i = 0; i < 5; ++i)
2637 {
2638 BufferInfo newBufInfo;
2639 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2640 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002641 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002642 bufInfo.push_back(newBufInfo);
2643 }
2644
 2645 // Make sure that the pool now has two blocks.
2646 VmaPoolStats poolStats = {};
2647 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002648 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002649
2650 // Delete half of buffers, LIFO.
2651 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2652 {
2653 const BufferInfo& currBufInfo = bufInfo.back();
2654 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2655 bufInfo.pop_back();
2656 }
2657
2658 // Add one more buffer.
2659 BufferInfo newBufInfo;
2660 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2661 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002662 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002663 bufInfo.push_back(newBufInfo);
2664
 2665 // Make sure that the pool now has one block.
2666 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002667 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002668
2669 // Delete all the remaining buffers, LIFO.
2670 while(!bufInfo.empty())
2671 {
2672 const BufferInfo& currBufInfo = bufInfo.back();
2673 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2674 bufInfo.pop_back();
2675 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002676 }
2677
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002678 vmaDestroyPool(g_hAllocator, pool);
2679}
2680
Adam Sawickifd11d752018-08-22 15:02:10 +02002681static void ManuallyTestLinearAllocator()
2682{
2683 VmaStats origStats;
2684 vmaCalculateStats(g_hAllocator, &origStats);
2685
2686 wprintf(L"Manually test linear allocator\n");
2687
2688 RandomNumberGenerator rand{645332};
2689
2690 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2691 sampleBufCreateInfo.size = 1024; // Whatever.
2692 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2693
2694 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2695 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2696
2697 VmaPoolCreateInfo poolCreateInfo = {};
2698 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002699 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002700
2701 poolCreateInfo.blockSize = 10 * 1024;
2702 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2703 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2704
2705 VmaPool pool = nullptr;
2706 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002707 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002708
2709 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2710
2711 VmaAllocationCreateInfo allocCreateInfo = {};
2712 allocCreateInfo.pool = pool;
2713
2714 std::vector<BufferInfo> bufInfo;
2715 VmaAllocationInfo allocInfo;
2716 BufferInfo newBufInfo;
2717
2718 // Test double stack.
2719 {
2720 /*
2721 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2722 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2723
2724 Totally:
2725 1 block allocated
2726 10240 Vulkan bytes
2727 6 new allocations
2728 2256 bytes in allocations
2729 */
2730
2731 bufCreateInfo.size = 32;
2732 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2733 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002734 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002735 bufInfo.push_back(newBufInfo);
2736
2737 bufCreateInfo.size = 1024;
2738 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2739 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002740 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002741 bufInfo.push_back(newBufInfo);
2742
2743 bufCreateInfo.size = 32;
2744 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2745 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002746 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002747 bufInfo.push_back(newBufInfo);
2748
2749 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2750
2751 bufCreateInfo.size = 128;
2752 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2753 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002754 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002755 bufInfo.push_back(newBufInfo);
2756
2757 bufCreateInfo.size = 1024;
2758 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2759 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002760 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002761 bufInfo.push_back(newBufInfo);
2762
2763 bufCreateInfo.size = 16;
2764 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2765 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002766 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002767 bufInfo.push_back(newBufInfo);
2768
2769 VmaStats currStats;
2770 vmaCalculateStats(g_hAllocator, &currStats);
2771 VmaPoolStats poolStats;
2772 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2773
2774 char* statsStr = nullptr;
2775 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2776
2777 // PUT BREAKPOINT HERE TO CHECK.
2778 // Inspect: currStats versus origStats, poolStats, statsStr.
2779 int I = 0;
2780
2781 vmaFreeStatsString(g_hAllocator, statsStr);
2782
2783 // Destroy the buffers in reverse order.
2784 while(!bufInfo.empty())
2785 {
2786 const BufferInfo& currBufInfo = bufInfo.back();
2787 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2788 bufInfo.pop_back();
2789 }
2790 }
2791
2792 vmaDestroyPool(g_hAllocator, pool);
2793}
2794
Adam Sawicki80927152018-09-07 17:27:23 +02002795static void BenchmarkAlgorithmsCase(FILE* file,
2796 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002797 bool empty,
2798 VmaAllocationCreateFlags allocStrategy,
2799 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002800{
2801 RandomNumberGenerator rand{16223};
2802
2803 const VkDeviceSize bufSizeMin = 32;
2804 const VkDeviceSize bufSizeMax = 1024;
2805 const size_t maxBufCapacity = 10000;
2806 const uint32_t iterationCount = 10;
2807
2808 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2809 sampleBufCreateInfo.size = bufSizeMax;
2810 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2811
2812 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2813 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2814
2815 VmaPoolCreateInfo poolCreateInfo = {};
2816 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002817 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002818
2819 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002820 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002821 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2822
2823 VmaPool pool = nullptr;
2824 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002825 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002826
2827 // Buffer created just to get memory requirements. Never bound to any memory.
2828 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2829 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002830 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002831
2832 VkMemoryRequirements memReq = {};
2833 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2834
2835 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2836
2837 VmaAllocationCreateInfo allocCreateInfo = {};
2838 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002839 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002840
2841 VmaAllocation alloc;
2842 std::vector<VmaAllocation> baseAllocations;
2843
2844 if(!empty)
2845 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002846 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002847 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002848 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002849 {
2850 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2851 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002852 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002853 baseAllocations.push_back(alloc);
2854 totalSize += memReq.size;
2855 }
2856
2857 // Delete half of them, choose randomly.
2858 size_t allocsToDelete = baseAllocations.size() / 2;
2859 for(size_t i = 0; i < allocsToDelete; ++i)
2860 {
2861 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2862 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2863 baseAllocations.erase(baseAllocations.begin() + index);
2864 }
2865 }
2866
2867 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002868 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002869 std::vector<VmaAllocation> testAllocations;
2870 testAllocations.reserve(allocCount);
2871 duration allocTotalDuration = duration::zero();
2872 duration freeTotalDuration = duration::zero();
2873 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2874 {
2875 // Allocations
2876 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2877 for(size_t i = 0; i < allocCount; ++i)
2878 {
2879 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2880 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002881 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002882 testAllocations.push_back(alloc);
2883 }
2884 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2885
2886 // Deallocations
2887 switch(freeOrder)
2888 {
2889 case FREE_ORDER::FORWARD:
2890 // Leave testAllocations unchanged.
2891 break;
2892 case FREE_ORDER::BACKWARD:
2893 std::reverse(testAllocations.begin(), testAllocations.end());
2894 break;
2895 case FREE_ORDER::RANDOM:
2896 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2897 break;
2898 default: assert(0);
2899 }
2900
2901 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2902 for(size_t i = 0; i < allocCount; ++i)
2903 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2904 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2905
2906 testAllocations.clear();
2907 }
2908
2909 // Delete baseAllocations
2910 while(!baseAllocations.empty())
2911 {
2912 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2913 baseAllocations.pop_back();
2914 }
2915
2916 vmaDestroyPool(g_hAllocator, pool);
2917
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002918 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2919 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2920
Adam Sawicki80927152018-09-07 17:27:23 +02002921 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2922 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002923 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002924 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002925 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002926 allocTotalSeconds,
2927 freeTotalSeconds);
2928
2929 if(file)
2930 {
2931 std::string currTime;
2932 CurrentTimeToStr(currTime);
2933
Adam Sawicki80927152018-09-07 17:27:23 +02002934 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002935 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002936 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002937 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002938 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002939 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2940 allocTotalSeconds,
2941 freeTotalSeconds);
2942 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002943}
2944
Adam Sawicki80927152018-09-07 17:27:23 +02002945static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002946{
Adam Sawicki80927152018-09-07 17:27:23 +02002947 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002948
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002949 if(file)
2950 {
2951 fprintf(file,
2952 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002953 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002954 "Allocation time (s),Deallocation time (s)\n");
2955 }
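// A data row matching the header above might look like the following
// (all values are illustrative, not real measurements):
// Foo,2018-10-16 13:49:02,Linear,1,BestFit,BACKWARD,0.0123,0.0045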
2956
Adam Sawicki0a607132018-08-24 11:18:41 +02002957 uint32_t freeOrderCount = 1;
2958 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2959 freeOrderCount = 3;
2960 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2961 freeOrderCount = 2;
2962
2963 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002964 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002965
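// The benchmark sweeps a full matrix of cases:
// free order x (empty vs. non-empty pool) x algorithm (Default, Buddy, Linear) x allocation strategy.
// Non-default algorithms ignore the allocation strategy, so only one strategy is run for them.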
2966 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2967 {
2968 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2969 switch(freeOrderIndex)
2970 {
2971 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2972 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2973 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2974 default: assert(0);
2975 }
2976
2977 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2978 {
Adam Sawicki80927152018-09-07 17:27:23 +02002979 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002980 {
Adam Sawicki80927152018-09-07 17:27:23 +02002981 uint32_t algorithm = 0;
2982 switch(algorithmIndex)
2983 {
2984 case 0:
2985 break;
2986 case 1:
2987 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2988 break;
2989 case 2:
2990 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2991 break;
2992 default:
2993 assert(0);
2994 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002995
Adam Sawicki80927152018-09-07 17:27:23 +02002996 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002997 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2998 {
2999 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003000 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003001 {
3002 switch(allocStrategyIndex)
3003 {
3004 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3005 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3006 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3007 default: assert(0);
3008 }
3009 }
3010
Adam Sawicki80927152018-09-07 17:27:23 +02003011 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003012 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003013 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003014 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003015 strategy,
3016 freeOrder); // freeOrder
3017 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003018 }
3019 }
3020 }
3021}
3022
Adam Sawickib8333fb2018-03-13 16:15:53 +01003023static void TestPool_SameSize()
3024{
3025 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3026 const size_t BUF_COUNT = 100;
3027 VkResult res;
3028
3029 RandomNumberGenerator rand{123};
3030
3031 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3032 bufferInfo.size = BUF_SIZE;
3033 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3034
3035 uint32_t memoryTypeBits = UINT32_MAX;
3036 {
3037 VkBuffer dummyBuffer;
3038 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003039 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003040
3041 VkMemoryRequirements memReq;
3042 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3043 memoryTypeBits = memReq.memoryTypeBits;
3044
3045 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3046 }
3047
3048 VmaAllocationCreateInfo poolAllocInfo = {};
3049 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3050 uint32_t memTypeIndex;
3051 res = vmaFindMemoryTypeIndex(
3052 g_hAllocator,
3053 memoryTypeBits,
3054 &poolAllocInfo,
3055 &memTypeIndex);
3056
3057 VmaPoolCreateInfo poolCreateInfo = {};
3058 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3059 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3060 poolCreateInfo.minBlockCount = 1;
3061 poolCreateInfo.maxBlockCount = 4;
3062 poolCreateInfo.frameInUseCount = 0;
3063
3064 VmaPool pool;
3065 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003066 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003067
3068 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3069
3070 VmaAllocationCreateInfo allocInfo = {};
3071 allocInfo.pool = pool;
3072 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3073 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3074
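// With CAN_BECOME_LOST + CAN_MAKE_OTHER_LOST, allocations from this pool may be reclaimed
// ("lost") to make room for new ones, but only if they were last used in a frame older than
// the current frame minus frameInUseCount (0 here).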
3075 struct BufItem
3076 {
3077 VkBuffer Buf;
3078 VmaAllocation Alloc;
3079 };
3080 std::vector<BufItem> items;
3081
3082 // Fill entire pool.
3083 for(size_t i = 0; i < BUF_COUNT; ++i)
3084 {
3085 BufItem item;
3086 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003087 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003088 items.push_back(item);
3089 }
3090
3091 // Make sure that another allocation would fail.
3092 {
3093 BufItem item;
3094 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003095 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003096 }
3097
3098 // Validate that no buffer is lost. Also check that they are not mapped.
3099 for(size_t i = 0; i < items.size(); ++i)
3100 {
3101 VmaAllocationInfo allocInfo;
3102 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003103 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3104 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003105 }
3106
3107 // Free some percent of random items.
3108 {
3109 const size_t PERCENT_TO_FREE = 10;
3110 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3111 for(size_t i = 0; i < itemsToFree; ++i)
3112 {
3113 size_t index = (size_t)rand.Generate() % items.size();
3114 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3115 items.erase(items.begin() + index);
3116 }
3117 }
3118
3119 // Randomly allocate and free items.
3120 {
3121 const size_t OPERATION_COUNT = BUF_COUNT;
3122 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3123 {
3124 bool allocate = rand.Generate() % 2 != 0;
3125 if(allocate)
3126 {
3127 if(items.size() < BUF_COUNT)
3128 {
3129 BufItem item;
3130 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003131 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003132 items.push_back(item);
3133 }
3134 }
3135 else // Free
3136 {
3137 if(!items.empty())
3138 {
3139 size_t index = (size_t)rand.Generate() % items.size();
3140 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3141 items.erase(items.begin() + index);
3142 }
3143 }
3144 }
3145 }
3146
3147 // Allocate up to maximum.
3148 while(items.size() < BUF_COUNT)
3149 {
3150 BufItem item;
3151 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003152 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003153 items.push_back(item);
3154 }
3155
3156 // Validate that no buffer is lost.
3157 for(size_t i = 0; i < items.size(); ++i)
3158 {
3159 VmaAllocationInfo allocInfo;
3160 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003161 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003162 }
3163
3164 // Next frame.
3165 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3166
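// The pool is already full, so these frame-2 allocations can only succeed by making the
// existing allocations lost - they were last touched in frame 1 and frameInUseCount is 0,
// which makes them eligible.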
3167 // Allocate another BUF_COUNT buffers.
3168 for(size_t i = 0; i < BUF_COUNT; ++i)
3169 {
3170 BufItem item;
3171 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003172 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003173 items.push_back(item);
3174 }
3175
3176 // Make sure the first BUF_COUNT is lost. Delete them.
3177 for(size_t i = 0; i < BUF_COUNT; ++i)
3178 {
3179 VmaAllocationInfo allocInfo;
3180 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003181 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003182 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3183 }
3184 items.erase(items.begin(), items.begin() + BUF_COUNT);
3185
3186 // Validate that no buffer is lost.
3187 for(size_t i = 0; i < items.size(); ++i)
3188 {
3189 VmaAllocationInfo allocInfo;
3190 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003191 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003192 }
3193
3194 // Free one item.
3195 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3196 items.pop_back();
3197
3198 // Validate statistics.
3199 {
3200 VmaPoolStats poolStats = {};
3201 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003202 TEST(poolStats.allocationCount == items.size());
3203 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3204 TEST(poolStats.unusedRangeCount == 1);
3205 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3206 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003207 }
3208
3209 // Free all remaining items.
3210 for(size_t i = items.size(); i--; )
3211 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3212 items.clear();
3213
3214 // Allocate maximum items again.
3215 for(size_t i = 0; i < BUF_COUNT; ++i)
3216 {
3217 BufItem item;
3218 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003219 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003220 items.push_back(item);
3221 }
3222
3223 // Delete every other item.
3224 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3225 {
3226 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3227 items.erase(items.begin() + i);
3228 }
3229
3230 // Defragment!
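// After deleting every other buffer, ~50 allocations remain scattered over all 4 blocks
// (each block holds 25 buffers). Defragmentation should compact them into 2 blocks,
// which is why deviceMemoryBlocksFreed is expected to be 2 below.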
3231 {
3232 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3233 for(size_t i = 0; i < items.size(); ++i)
3234 allocationsToDefragment[i] = items[i].Alloc;
3235
3236 VmaDefragmentationStats defragmentationStats;
3237 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003238 TEST(res == VK_SUCCESS);
3239 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003240 }
3241
3242 // Free all remaining items.
3243 for(size_t i = items.size(); i--; )
3244 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3245 items.clear();
3246
3247 ////////////////////////////////////////////////////////////////////////////////
3248 // Test for vmaMakePoolAllocationsLost
3249
3250 // Allocate 4 buffers on frame 10.
3251 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3252 for(size_t i = 0; i < 4; ++i)
3253 {
3254 BufItem item;
3255 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003256 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003257 items.push_back(item);
3258 }
3259
3260 // Touch first 2 of them on frame 11.
3261 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3262 for(size_t i = 0; i < 2; ++i)
3263 {
3264 VmaAllocationInfo allocInfo;
3265 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3266 }
3267
3268 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3269 size_t lostCount = 0xDEADC0DE;
3270 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003271 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003272
3273 // Make another call. Now 0 should be lost.
3274 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003275 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003276
3277 // Make another call, with null count. Should not crash.
3278 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3279
3280 // END: Free all remaining items.
3281 for(size_t i = items.size(); i--; )
3282 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3283
3284 items.clear();
3285
Adam Sawickid2924172018-06-11 12:48:46 +02003286 ////////////////////////////////////////////////////////////////////////////////
3287 // Test for allocation too large for pool
3288
3289 {
3290 VmaAllocationCreateInfo allocCreateInfo = {};
3291 allocCreateInfo.pool = pool;
3292
3293 VkMemoryRequirements memReq;
3294 memReq.memoryTypeBits = UINT32_MAX;
3295 memReq.alignment = 1;
3296 memReq.size = poolCreateInfo.blockSize + 4;
3297
3298 VmaAllocation alloc = nullptr;
3299 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003300 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003301 }
3302
Adam Sawickib8333fb2018-03-13 16:15:53 +01003303 vmaDestroyPool(g_hAllocator, pool);
3304}
3305
Adam Sawickib0c36362018-11-13 16:17:38 +01003306static void TestResize()
3307{
3308 wprintf(L"Testing vmaResizeAllocation...\n");
3309
3310 const VkDeviceSize KILOBYTE = 1024ull;
3311 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3312
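// The test fills a single 8 MB block with four 2 MB allocations, then exercises
// vmaResizeAllocation: shrinking or growing in place succeeds only when the adjacent free
// space in the block allows it, otherwise VK_ERROR_OUT_OF_POOL_MEMORY is returned.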
3313 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3314 bufCreateInfo.size = 2 * MEGABYTE;
3315 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3316
3317 VmaAllocationCreateInfo allocCreateInfo = {};
3318 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3319
3320 uint32_t memTypeIndex = UINT32_MAX;
3321 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3322
3323 VmaPoolCreateInfo poolCreateInfo = {};
3324 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3325 poolCreateInfo.blockSize = 8 * MEGABYTE;
3326 poolCreateInfo.minBlockCount = 1;
3327 poolCreateInfo.maxBlockCount = 1;
3328 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3329
3330 VmaPool pool;
3331 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3332
3333 allocCreateInfo.pool = pool;
3334
3335 // Fill 8 MB pool with 4 * 2 MB allocations.
3336 VmaAllocation allocs[4] = {};
3337
3338 VkMemoryRequirements memReq = {};
3339 memReq.memoryTypeBits = UINT32_MAX;
3340 memReq.alignment = 4;
3341 memReq.size = bufCreateInfo.size;
3342
3343 VmaAllocationInfo allocInfo = {};
3344
3345 for(uint32_t i = 0; i < 4; ++i)
3346 {
3347 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3348 }
3349
3350 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3351
3352 // Case: Resize to the same size always succeeds.
3353 {
3354 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3355 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3356 TEST(allocInfo.size == 2ull * 1024 * 1024);
3357 }
3358
3359 // Case: Shrink allocation at the end.
3360 {
3361 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3362 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3363 TEST(allocInfo.size == 1ull * 1024 * 1024);
3364 }
3365
3366 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3367
3368 // Case: Shrink allocation before free space.
3369 {
3370 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3371 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3372 TEST(allocInfo.size == 512 * KILOBYTE);
3373 }
3374
3375 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3376
3377 // Case: Shrink allocation before next allocation.
3378 {
3379 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3380 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3381 TEST(allocInfo.size == 1 * MEGABYTE);
3382 }
3383
3384 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3385
3386 // Case: Grow allocation while there is even more space available.
3387 {
3388 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3389 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3390 TEST(allocInfo.size == 1 * MEGABYTE);
3391 }
3392
3393 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3394
3395 // Case: Grow allocation while there is exact amount of free space available.
3396 {
3397 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3398 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3399 TEST(allocInfo.size == 2 * MEGABYTE);
3400 }
3401
3402 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3403
3404 // Case: Fail to grow when there is not enough free space due to next allocation.
3405 {
3406 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3407 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3408 TEST(allocInfo.size == 2 * MEGABYTE);
3409 }
3410
3411 // Case: Fail to grow when there is not enough free space due to end of memory block.
3412 {
3413 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3414 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3415 TEST(allocInfo.size == 1 * MEGABYTE);
3416 }
3417
3418 for(uint32_t i = 4; i--; )
3419 {
3420 vmaFreeMemory(g_hAllocator, allocs[i]);
3421 }
3422
3423 vmaDestroyPool(g_hAllocator, pool);
3424
3425 // Test dedicated allocation
3426 {
3427 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3428 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3429 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3430
3431 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3432 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3433
3434 // Case: Resize to the same size always succeeds.
3435 {
3436 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3437 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3438 TEST(allocInfo.size == 2ull * 1024 * 1024);
3439 }
3440
3441 // Case: Shrinking fails.
3442 {
3443 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3444 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3445 TEST(allocInfo.size == 2ull * 1024 * 1024);
3446 }
3447
3448 // Case: Growing fails.
3449 {
3450 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3451 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3452 TEST(allocInfo.size == 2ull * 1024 * 1024);
3453 }
3454
3455 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3456 }
3457}
3458
Adam Sawickie44c6262018-06-15 14:30:39 +02003459static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3460{
3461 const uint8_t* pBytes = (const uint8_t*)pMemory;
3462 for(size_t i = 0; i < size; ++i)
3463 {
3464 if(pBytes[i] != pattern)
3465 {
3466 return false;
3467 }
3468 }
3469 return true;
3470}
3471
3472static void TestAllocationsInitialization()
3473{
3474 VkResult res;
3475
3476 const size_t BUF_SIZE = 1024;
3477
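// This test relies on VMA_DEBUG_INITIALIZE_ALLOCATIONS being enabled in the test build,
// which makes the allocator fill newly created allocations with 0xDC and freed ones with
// 0xEF; the patterns are validated below through a persistently mapped pointer.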
3478 // Create pool.
3479
3480 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3481 bufInfo.size = BUF_SIZE;
3482 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3483
3484 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3485 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3486
3487 VmaPoolCreateInfo poolCreateInfo = {};
3488 poolCreateInfo.blockSize = BUF_SIZE * 10;
3489 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3490 poolCreateInfo.maxBlockCount = 1;
3491 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003492 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003493
3494 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3495 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003496 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003497
3498 // Create one persistently mapped buffer to keep memory of this block mapped,
3499 // so that pointer to mapped data will remain (more or less...) valid even
3500 // after destruction of other allocations.
3501
3502 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3503 VkBuffer firstBuf;
3504 VmaAllocation firstAlloc;
3505 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003506 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003507
3508 // Test buffers.
3509
3510 for(uint32_t i = 0; i < 2; ++i)
3511 {
3512 const bool persistentlyMapped = i == 0;
3513 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3514 VkBuffer buf;
3515 VmaAllocation alloc;
3516 VmaAllocationInfo allocInfo;
3517 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003518 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003519
3520 void* pMappedData;
3521 if(!persistentlyMapped)
3522 {
3523 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003524 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003525 }
3526 else
3527 {
3528 pMappedData = allocInfo.pMappedData;
3529 }
3530
3531 // Validate initialized content
3532 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003533 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003534
3535 if(!persistentlyMapped)
3536 {
3537 vmaUnmapMemory(g_hAllocator, alloc);
3538 }
3539
3540 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3541
3542 // Validate freed content
3543 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003544 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003545 }
3546
3547 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3548 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3549}
3550
Adam Sawickib8333fb2018-03-13 16:15:53 +01003551static void TestPool_Benchmark(
3552 PoolTestResult& outResult,
3553 const PoolTestConfig& config)
3554{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003555 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003556
3557 RandomNumberGenerator mainRand{config.RandSeed};
3558
3559 uint32_t allocationSizeProbabilitySum = std::accumulate(
3560 config.AllocationSizes.begin(),
3561 config.AllocationSizes.end(),
3562 0u,
3563 [](uint32_t sum, const AllocationSize& allocSize) {
3564 return sum + allocSize.Probability;
3565 });
3566
3567 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3568 bufferInfo.size = 256; // Whatever.
3569 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3570
3571 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3572 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3573 imageInfo.extent.width = 256; // Whatever.
3574 imageInfo.extent.height = 256; // Whatever.
3575 imageInfo.extent.depth = 1;
3576 imageInfo.mipLevels = 1;
3577 imageInfo.arrayLayers = 1;
3578 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3579 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3580 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3581 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3582 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3583
3584 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3585 {
3586 VkBuffer dummyBuffer;
3587 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003588 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003589
3590 VkMemoryRequirements memReq;
3591 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3592 bufferMemoryTypeBits = memReq.memoryTypeBits;
3593
3594 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3595 }
3596
3597 uint32_t imageMemoryTypeBits = UINT32_MAX;
3598 {
3599 VkImage dummyImage;
3600 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003601 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003602
3603 VkMemoryRequirements memReq;
3604 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3605 imageMemoryTypeBits = memReq.memoryTypeBits;
3606
3607 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3608 }
3609
3610 uint32_t memoryTypeBits = 0;
3611 if(config.UsesBuffers() && config.UsesImages())
3612 {
3613 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3614 if(memoryTypeBits == 0)
3615 {
3616 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3617 return;
3618 }
3619 }
3620 else if(config.UsesBuffers())
3621 memoryTypeBits = bufferMemoryTypeBits;
3622 else if(config.UsesImages())
3623 memoryTypeBits = imageMemoryTypeBits;
3624 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003625 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003626
3627 VmaPoolCreateInfo poolCreateInfo = {};
3628 poolCreateInfo.memoryTypeIndex = 0;
3629 poolCreateInfo.minBlockCount = 1;
3630 poolCreateInfo.maxBlockCount = 1;
3631 poolCreateInfo.blockSize = config.PoolSize;
3632 poolCreateInfo.frameInUseCount = 1;
3633
3634 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3635 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3636 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3637
3638 VmaPool pool;
3639 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003640 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003641
3642 // Start time measurement - after creating pool and initializing data structures.
3643 time_point timeBeg = std::chrono::high_resolution_clock::now();
3644
3645 ////////////////////////////////////////////////////////////////////////////////
3646 // ThreadProc
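// Each worker thread runs the frame loop below: it waits for its frameStartEvent, shuffles
// items between the used and unused sets, touches or (re)allocates the used ones, and then
// signals frameEndEvent. The main thread advances the global frame index and drives these
// events one frame at a time.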
3647 auto ThreadProc = [&](
3648 PoolTestThreadResult* outThreadResult,
3649 uint32_t randSeed,
3650 HANDLE frameStartEvent,
3651 HANDLE frameEndEvent) -> void
3652 {
3653 RandomNumberGenerator threadRand{randSeed};
3654
3655 outThreadResult->AllocationTimeMin = duration::max();
3656 outThreadResult->AllocationTimeSum = duration::zero();
3657 outThreadResult->AllocationTimeMax = duration::min();
3658 outThreadResult->DeallocationTimeMin = duration::max();
3659 outThreadResult->DeallocationTimeSum = duration::zero();
3660 outThreadResult->DeallocationTimeMax = duration::min();
3661 outThreadResult->AllocationCount = 0;
3662 outThreadResult->DeallocationCount = 0;
3663 outThreadResult->LostAllocationCount = 0;
3664 outThreadResult->LostAllocationTotalSize = 0;
3665 outThreadResult->FailedAllocationCount = 0;
3666 outThreadResult->FailedAllocationTotalSize = 0;
3667
3668 struct Item
3669 {
3670 VkDeviceSize BufferSize;
3671 VkExtent2D ImageSize;
3672 VkBuffer Buf;
3673 VkImage Image;
3674 VmaAllocation Alloc;
3675
3676 VkDeviceSize CalcSizeBytes() const
3677 {
3678 return BufferSize +
3679 ImageSize.width * ImageSize.height * 4;
3680 }
3681 };
3682 std::vector<Item> unusedItems, usedItems;
3683
3684 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3685
3686 // Create all items - all unused, not yet allocated.
3687 for(size_t i = 0; i < threadTotalItemCount; ++i)
3688 {
3689 Item item = {};
3690
3691 uint32_t allocSizeIndex = 0;
3692 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3693 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3694 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3695
3696 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3697 if(allocSize.BufferSizeMax > 0)
3698 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003699 TEST(allocSize.BufferSizeMin > 0);
3700 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003701 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3702 item.BufferSize = allocSize.BufferSizeMin;
3703 else
3704 {
3705 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3706 item.BufferSize = item.BufferSize / 16 * 16;
3707 }
3708 }
3709 else
3710 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003711 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003712 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3713 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3714 else
3715 {
3716 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3717 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3718 }
3719 }
3720
3721 unusedItems.push_back(item);
3722 }
3723
3724 auto Allocate = [&](Item& item) -> VkResult
3725 {
3726 VmaAllocationCreateInfo allocCreateInfo = {};
3727 allocCreateInfo.pool = pool;
3728 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3729 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3730
3731 if(item.BufferSize)
3732 {
3733 bufferInfo.size = item.BufferSize;
3734 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3735 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3736 }
3737 else
3738 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003739 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003740
3741 imageInfo.extent.width = item.ImageSize.width;
3742 imageInfo.extent.height = item.ImageSize.height;
3743 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3744 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3745 }
3746 };
3747
3748 ////////////////////////////////////////////////////////////////////////////////
3749 // Frames
3750 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3751 {
3752 WaitForSingleObject(frameStartEvent, INFINITE);
3753
3754 // Always make some percent of used bufs unused, to choose different used ones.
3755 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3756 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3757 {
3758 size_t index = threadRand.Generate() % usedItems.size();
3759 unusedItems.push_back(usedItems[index]);
3760 usedItems.erase(usedItems.begin() + index);
3761 }
3762
3763 // Determine which bufs we want to use in this frame.
3764 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3765 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003766 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003767 // Move some used to unused.
3768 while(usedBufCount < usedItems.size())
3769 {
3770 size_t index = threadRand.Generate() % usedItems.size();
3771 unusedItems.push_back(usedItems[index]);
3772 usedItems.erase(usedItems.begin() + index);
3773 }
3774 // Move some unused to used.
3775 while(usedBufCount > usedItems.size())
3776 {
3777 size_t index = threadRand.Generate() % unusedItems.size();
3778 usedItems.push_back(unusedItems[index]);
3779 unusedItems.erase(unusedItems.begin() + index);
3780 }
3781
3782 uint32_t touchExistingCount = 0;
3783 uint32_t touchLostCount = 0;
3784 uint32_t createSucceededCount = 0;
3785 uint32_t createFailedCount = 0;
3786
3787 // Touch all used bufs. If not created or lost, allocate.
3788 for(size_t i = 0; i < usedItems.size(); ++i)
3789 {
3790 Item& item = usedItems[i];
3791 // Not yet created.
3792 if(item.Alloc == VK_NULL_HANDLE)
3793 {
3794 res = Allocate(item);
3795 ++outThreadResult->AllocationCount;
3796 if(res != VK_SUCCESS)
3797 {
3798 item.Alloc = VK_NULL_HANDLE;
3799 item.Buf = VK_NULL_HANDLE;
3800 ++outThreadResult->FailedAllocationCount;
3801 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3802 ++createFailedCount;
3803 }
3804 else
3805 ++createSucceededCount;
3806 }
3807 else
3808 {
3809 // Touch.
3810 VmaAllocationInfo allocInfo;
3811 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3812 // Lost.
3813 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3814 {
3815 ++touchLostCount;
3816
3817 // Destroy.
3818 {
3819 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3820 if(item.Buf)
3821 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3822 else
3823 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3824 ++outThreadResult->DeallocationCount;
3825 }
3826 item.Alloc = VK_NULL_HANDLE;
3827 item.Buf = VK_NULL_HANDLE;
3828
3829 ++outThreadResult->LostAllocationCount;
3830 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3831
3832 // Recreate.
3833 res = Allocate(item);
3834 ++outThreadResult->AllocationCount;
3835 // Creation failed.
3836 if(res != VK_SUCCESS)
3837 {
3838 ++outThreadResult->FailedAllocationCount;
3839 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3840 ++createFailedCount;
3841 }
3842 else
3843 ++createSucceededCount;
3844 }
3845 else
3846 ++touchExistingCount;
3847 }
3848 }
3849
3850 /*
3851 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3852 randSeed, frameIndex,
3853 touchExistingCount, touchLostCount,
3854 createSucceededCount, createFailedCount);
3855 */
3856
3857 SetEvent(frameEndEvent);
3858 }
3859
3860 // Free all remaining items.
3861 for(size_t i = usedItems.size(); i--; )
3862 {
3863 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3864 if(usedItems[i].Buf)
3865 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3866 else
3867 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3868 ++outThreadResult->DeallocationCount;
3869 }
3870 for(size_t i = unusedItems.size(); i--; )
3871 {
3872 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3873 if(unusedItems[i].Buf)
3874 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3875 else
3876 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3877 ++outThreadResult->DeallocationCount;
3878 }
3879 };
3880
3881 // Launch threads.
3882 uint32_t threadRandSeed = mainRand.Generate();
3883 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3884 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3885 std::vector<std::thread> bkgThreads;
3886 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3887 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3888 {
3889 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3890 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3891 bkgThreads.emplace_back(std::bind(
3892 ThreadProc,
3893 &threadResults[threadIndex],
3894 threadRandSeed + threadIndex,
3895 frameStartEvents[threadIndex],
3896 frameEndEvents[threadIndex]));
3897 }
3898
3899 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003900 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003901 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3902 {
3903 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3904 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3905 SetEvent(frameStartEvents[threadIndex]);
3906 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3907 }
3908
3909 // Wait for threads finished
3910 for(size_t i = 0; i < bkgThreads.size(); ++i)
3911 {
3912 bkgThreads[i].join();
3913 CloseHandle(frameEndEvents[i]);
3914 CloseHandle(frameStartEvents[i]);
3915 }
3916 bkgThreads.clear();
3917
3918 // Finish time measurement - before destroying pool.
3919 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3920
3921 vmaDestroyPool(g_hAllocator, pool);
3922
3923 outResult.AllocationTimeMin = duration::max();
3924 outResult.AllocationTimeAvg = duration::zero();
3925 outResult.AllocationTimeMax = duration::min();
3926 outResult.DeallocationTimeMin = duration::max();
3927 outResult.DeallocationTimeAvg = duration::zero();
3928 outResult.DeallocationTimeMax = duration::min();
3929 outResult.LostAllocationCount = 0;
3930 outResult.LostAllocationTotalSize = 0;
3931 outResult.FailedAllocationCount = 0;
3932 outResult.FailedAllocationTotalSize = 0;
3933 size_t allocationCount = 0;
3934 size_t deallocationCount = 0;
3935 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3936 {
3937 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3938 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3939 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3940 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3941 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3942 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3943 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3944 allocationCount += threadResult.AllocationCount;
3945 deallocationCount += threadResult.DeallocationCount;
3946 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3947 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3948 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3949 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3950 }
3951 if(allocationCount)
3952 outResult.AllocationTimeAvg /= allocationCount;
3953 if(deallocationCount)
3954 outResult.DeallocationTimeAvg /= deallocationCount;
3955}
3956
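// Returns true if the memory ranges [ptr1, ptr1 + size1) and [ptr2, ptr2 + size2) overlap
// (equal base pointers always count as overlapping).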
3957static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3958{
3959 if(ptr1 < ptr2)
3960 return ptr1 + size1 > ptr2;
3961 else if(ptr2 < ptr1)
3962 return ptr2 + size2 > ptr1;
3963 else
3964 return true;
3965}
3966
3967static void TestMapping()
3968{
3969 wprintf(L"Testing mapping...\n");
3970
3971 VkResult res;
3972 uint32_t memTypeIndex = UINT32_MAX;
3973
3974 enum TEST
3975 {
3976 TEST_NORMAL,
3977 TEST_POOL,
3978 TEST_DEDICATED,
3979 TEST_COUNT
3980 };
3981 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3982 {
3983 VmaPool pool = nullptr;
3984 if(testIndex == TEST_POOL)
3985 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003986 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003987 VmaPoolCreateInfo poolInfo = {};
3988 poolInfo.memoryTypeIndex = memTypeIndex;
3989 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003990 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003991 }
3992
3993 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3994 bufInfo.size = 0x10000;
3995 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3996
3997 VmaAllocationCreateInfo allocCreateInfo = {};
3998 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3999 allocCreateInfo.pool = pool;
4000 if(testIndex == TEST_DEDICATED)
4001 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4002
4003 VmaAllocationInfo allocInfo;
4004
4005 // Mapped manually
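// vmaMapMemory is reference-counted: mapping the same allocation twice returns the same
// pointer, and the memory stays mapped until a matching number of vmaUnmapMemory calls is
// made - which is exactly what the code below verifies.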
4006
4007 // Create 2 buffers.
4008 BufferInfo bufferInfos[3];
4009 for(size_t i = 0; i < 2; ++i)
4010 {
4011 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4012 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004013 TEST(res == VK_SUCCESS);
4014 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004015 memTypeIndex = allocInfo.memoryType;
4016 }
4017
4018 // Map buffer 0.
4019 char* data00 = nullptr;
4020 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004021 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004022 data00[0xFFFF] = data00[0];
4023
4024 // Map buffer 0 second time.
4025 char* data01 = nullptr;
4026 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004027 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004028
4029 // Map buffer 1.
4030 char* data1 = nullptr;
4031 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004032 TEST(res == VK_SUCCESS && data1 != nullptr);
4033 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004034 data1[0xFFFF] = data1[0];
4035
4036 // Unmap buffer 0 two times.
4037 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4038 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4039 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004040 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004041
4042 // Unmap buffer 1.
4043 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4044 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004045 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004046
4047 // Create 3rd buffer - persistently mapped.
4048 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4049 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4050 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004051 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004052
4053 // Map buffer 2.
4054 char* data2 = nullptr;
4055 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004056 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004057 data2[0xFFFF] = data2[0];
4058
4059 // Unmap buffer 2.
4060 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4061 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004062 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004063
4064 // Destroy all buffers.
4065 for(size_t i = 3; i--; )
4066 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4067
4068 vmaDestroyPool(g_hAllocator, pool);
4069 }
4070}
4071
4072static void TestMappingMultithreaded()
4073{
4074 wprintf(L"Testing mapping multithreaded...\n");
4075
4076 static const uint32_t threadCount = 16;
4077 static const uint32_t bufferCount = 1024;
4078 static const uint32_t threadBufferCount = bufferCount / threadCount;
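// 1024 buffers split evenly across 16 threads: 64 buffers per thread.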
4079
4080 VkResult res;
4081 volatile uint32_t memTypeIndex = UINT32_MAX;
4082
4083 enum TEST
4084 {
4085 TEST_NORMAL,
4086 TEST_POOL,
4087 TEST_DEDICATED,
4088 TEST_COUNT
4089 };
4090 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4091 {
4092 VmaPool pool = nullptr;
4093 if(testIndex == TEST_POOL)
4094 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004095 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004096 VmaPoolCreateInfo poolInfo = {};
4097 poolInfo.memoryTypeIndex = memTypeIndex;
4098 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004099 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004100 }
4101
4102 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4103 bufCreateInfo.size = 0x10000;
4104 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4105
4106 VmaAllocationCreateInfo allocCreateInfo = {};
4107 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4108 allocCreateInfo.pool = pool;
4109 if(testIndex == TEST_DEDICATED)
4110 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4111
4112 std::thread threads[threadCount];
4113 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4114 {
4115 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4116 // ======== THREAD FUNCTION ========
4117
4118 RandomNumberGenerator rand{threadIndex};
4119
4120 enum class MODE
4121 {
4122 // Don't map this buffer at all.
4123 DONT_MAP,
4124 // Map and quickly unmap.
4125 MAP_FOR_MOMENT,
4126 // Map and unmap before destruction.
4127 MAP_FOR_LONGER,
4128 // Map two times. Quickly unmap, second unmap before destruction.
4129 MAP_TWO_TIMES,
4130 // Create this buffer as persistently mapped.
4131 PERSISTENTLY_MAPPED,
4132 COUNT
4133 };
4134 std::vector<BufferInfo> bufInfos{threadBufferCount};
4135 std::vector<MODE> bufModes{threadBufferCount};
4136
4137 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4138 {
4139 BufferInfo& bufInfo = bufInfos[bufferIndex];
4140 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4141 bufModes[bufferIndex] = mode;
4142
4143 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4144 if(mode == MODE::PERSISTENTLY_MAPPED)
4145 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4146
4147 VmaAllocationInfo allocInfo;
4148 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4149 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004150 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004151
4152 if(memTypeIndex == UINT32_MAX)
4153 memTypeIndex = allocInfo.memoryType;
4154
4155 char* data = nullptr;
4156
4157 if(mode == MODE::PERSISTENTLY_MAPPED)
4158 {
4159 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004160 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004161 }
4162 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4163 mode == MODE::MAP_TWO_TIMES)
4164 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004165 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004166 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004167 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004168
4169 if(mode == MODE::MAP_TWO_TIMES)
4170 {
4171 char* data2 = nullptr;
4172 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004173 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004174 }
4175 }
4176 else if(mode == MODE::DONT_MAP)
4177 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004178 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004179 }
4180 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004181 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004182
4183 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4184 if(data)
4185 data[0xFFFF] = data[0];
4186
4187 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4188 {
4189 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4190
4191 VmaAllocationInfo allocInfo;
4192 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4193 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004194 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004195 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004196 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004197 }
4198
4199 switch(rand.Generate() % 3)
4200 {
4201 case 0: Sleep(0); break; // Yield.
4202 case 1: Sleep(10); break; // 10 ms
4203 // default: No sleep.
4204 }
4205
4206 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4207 if(data)
4208 data[0xFFFF] = data[0];
4209 }
4210
4211 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4212 {
4213 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4214 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4215 {
4216 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4217
4218 VmaAllocationInfo allocInfo;
4219 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004220 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004221 }
4222
4223 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4224 }
4225 });
4226 }
4227
4228 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4229 threads[threadIndex].join();
4230
4231 vmaDestroyPool(g_hAllocator, pool);
4232 }
4233}
4234
4235static void WriteMainTestResultHeader(FILE* file)
4236{
4237 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004238 "Code,Time,"
4239 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004240 "Total Time (us),"
4241 "Allocation Time Min (us),"
4242 "Allocation Time Avg (us),"
4243 "Allocation Time Max (us),"
4244 "Deallocation Time Min (us),"
4245 "Deallocation Time Avg (us),"
4246 "Deallocation Time Max (us),"
4247 "Total Memory Allocated (B),"
4248 "Free Range Size Avg (B),"
4249 "Free Range Size Max (B)\n");
4250}
4251
4252static void WriteMainTestResult(
4253 FILE* file,
4254 const char* codeDescription,
4255 const char* testDescription,
4256 const Config& config, const Result& result)
4257{
4258 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4259 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4260 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4261 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4262 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4263 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4264 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4265
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004266 std::string currTime;
4267 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004268
4269 fprintf(file,
4270 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004271 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4272 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004273 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004274 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004275 totalTimeSeconds * 1e6f,
4276 allocationTimeMinSeconds * 1e6f,
4277 allocationTimeAvgSeconds * 1e6f,
4278 allocationTimeMaxSeconds * 1e6f,
4279 deallocationTimeMinSeconds * 1e6f,
4280 deallocationTimeAvgSeconds * 1e6f,
4281 deallocationTimeMaxSeconds * 1e6f,
4282 result.TotalMemoryAllocated,
4283 result.FreeRangeSizeAvg,
4284 result.FreeRangeSizeMax);
4285}
4286
4287static void WritePoolTestResultHeader(FILE* file)
4288{
4289 fprintf(file,
4290 "Code,Test,Time,"
4291 "Config,"
4292 "Total Time (us),"
4293 "Allocation Time Min (us),"
4294 "Allocation Time Avg (us),"
4295 "Allocation Time Max (us),"
4296 "Deallocation Time Min (us),"
4297 "Deallocation Time Avg (us),"
4298 "Deallocation Time Max (us),"
4299 "Lost Allocation Count,"
4300 "Lost Allocation Total Size (B),"
4301 "Failed Allocation Count,"
4302 "Failed Allocation Total Size (B)\n");
4303}
4304
4305static void WritePoolTestResult(
4306 FILE* file,
4307 const char* codeDescription,
4308 const char* testDescription,
4309 const PoolTestConfig& config,
4310 const PoolTestResult& result)
4311{
4312 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4313 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4314 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4315 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4316 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4317 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4318 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4319
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004320 std::string currTime;
4321 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004322
4323 fprintf(file,
4324 "%s,%s,%s,"
4325 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4326 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4327 // General
4328 codeDescription,
4329 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004330 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004331 // Config
4332 config.ThreadCount,
4333 (unsigned long long)config.PoolSize,
4334 config.FrameCount,
4335 config.TotalItemCount,
4336 config.UsedItemCountMin,
4337 config.UsedItemCountMax,
4338 config.ItemsToMakeUnusedPercent,
4339 // Results
4340 totalTimeSeconds * 1e6f,
4341 allocationTimeMinSeconds * 1e6f,
4342 allocationTimeAvgSeconds * 1e6f,
4343 allocationTimeMaxSeconds * 1e6f,
4344 deallocationTimeMinSeconds * 1e6f,
4345 deallocationTimeAvgSeconds * 1e6f,
4346 deallocationTimeMaxSeconds * 1e6f,
4347 result.LostAllocationCount,
4348 result.LostAllocationTotalSize,
4349 result.FailedAllocationCount,
4350 result.FailedAllocationTotalSize);
4351}
4352
4353static void PerformCustomMainTest(FILE* file)
4354{
4355 Config config{};
4356 config.RandSeed = 65735476;
4357 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4358 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4359 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4360 config.FreeOrder = FREE_ORDER::FORWARD;
4361 config.ThreadCount = 16;
4362 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004363 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004364
4365 // Buffers
4366 //config.AllocationSizes.push_back({4, 16, 1024});
4367 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4368
4369 // Images
4370 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4371 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4372
4373 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4374 config.AdditionalOperationCount = 1024;
4375
4376 Result result{};
4377 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004378 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004379 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4380}
4381
4382static void PerformCustomPoolTest(FILE* file)
4383{
4384 PoolTestConfig config;
4385 config.PoolSize = 100 * 1024 * 1024;
4386 config.RandSeed = 2345764;
4387 config.ThreadCount = 1;
4388 config.FrameCount = 200;
4389 config.ItemsToMakeUnusedPercent = 2;
4390
4391 AllocationSize allocSize = {};
4392 allocSize.BufferSizeMin = 1024;
4393 allocSize.BufferSizeMax = 1024 * 1024;
4394 allocSize.Probability = 1;
4395 config.AllocationSizes.push_back(allocSize);
4396
4397 allocSize.BufferSizeMin = 0;
4398 allocSize.BufferSizeMax = 0;
4399 allocSize.ImageSizeMin = 128;
4400 allocSize.ImageSizeMax = 1024;
4401 allocSize.Probability = 1;
4402 config.AllocationSizes.push_back(allocSize);
4403
4404 config.PoolSize = config.CalcAvgResourceSize() * 200;
4405 config.UsedItemCountMax = 160;
4406 config.TotalItemCount = config.UsedItemCountMax * 10;
4407 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4408
4409 g_MemoryAliasingWarningEnabled = false;
4410 PoolTestResult result = {};
4411 TestPool_Benchmark(result, config);
4412 g_MemoryAliasingWarningEnabled = true;
4413
4414 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4415}
4416
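// Full benchmark matrix for the general allocator: iterates over thread count,
// buffers vs. images, small vs. large sizes, varying vs. constant sizes, the amount
// allocated up front vs. additional operations, and the allocation strategy
// (best/worst/first fit). Each combination is encoded in the CSV test description
// and repeated repeatCount times; how many dimensions are exercised depends on ConfigType.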
Adam Sawickib8333fb2018-03-13 16:15:53 +01004417static void PerformMainTests(FILE* file)
4418{
4419 uint32_t repeatCount = 1;
4420 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4421
4422 Config config{};
4423 config.RandSeed = 65735476;
4424 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4425 config.FreeOrder = FREE_ORDER::FORWARD;
4426
4427 size_t threadCountCount = 1;
4428 switch(ConfigType)
4429 {
4430 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4431 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4432 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4433 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4434 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4435 default: assert(0);
4436 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004437
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004438 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004439
Adam Sawickib8333fb2018-03-13 16:15:53 +01004440 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4441 {
4442 std::string desc1;
4443
4444 switch(threadCountIndex)
4445 {
4446 case 0:
4447 desc1 += "1_thread";
4448 config.ThreadCount = 1;
4449 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4450 break;
4451 case 1:
4452 desc1 += "16_threads+0%_common";
4453 config.ThreadCount = 16;
4454 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4455 break;
4456 case 2:
4457 desc1 += "16_threads+50%_common";
4458 config.ThreadCount = 16;
4459 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4460 break;
4461 case 3:
4462 desc1 += "16_threads+100%_common";
4463 config.ThreadCount = 16;
4464 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4465 break;
4466 case 4:
4467 desc1 += "2_threads+0%_common";
4468 config.ThreadCount = 2;
4469 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4470 break;
4471 case 5:
4472 desc1 += "2_threads+50%_common";
4473 config.ThreadCount = 2;
4474 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4475 break;
4476 case 6:
4477 desc1 += "2_threads+100%_common";
4478 config.ThreadCount = 2;
4479 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4480 break;
4481 default:
4482 assert(0);
4483 }
4484
4485 // 0 = buffers, 1 = images, 2 = buffers and images
4486 size_t buffersVsImagesCount = 2;
4487 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4488 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4489 {
4490 std::string desc2 = desc1;
4491 switch(buffersVsImagesIndex)
4492 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004493 case 0: desc2 += ",Buffers"; break;
4494 case 1: desc2 += ",Images"; break;
4495 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004496 default: assert(0);
4497 }
4498
4499 // 0 = small, 1 = large, 2 = small and large
4500 size_t smallVsLargeCount = 2;
4501 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4502 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4503 {
4504 std::string desc3 = desc2;
4505 switch(smallVsLargeIndex)
4506 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004507 case 0: desc3 += ",Small"; break;
4508 case 1: desc3 += ",Large"; break;
4509 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004510 default: assert(0);
4511 }
4512
4513 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4514 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4515 else
4516 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4517
4518 // 0 = varying sizes min...max, 1 = set of constant sizes
4519 size_t constantSizesCount = 1;
4520 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4521 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4522 {
4523 std::string desc4 = desc3;
4524 switch(constantSizesIndex)
4525 {
4526 case 0: desc4 += " Varying_sizes"; break;
4527 case 1: desc4 += " Constant_sizes"; break;
4528 default: assert(0);
4529 }
4530
4531 config.AllocationSizes.clear();
4532 // Buffers present
4533 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4534 {
4535 // Small
4536 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4537 {
4538 // Varying size
4539 if(constantSizesIndex == 0)
4540 config.AllocationSizes.push_back({4, 16, 1024});
4541 // Constant sizes
4542 else
4543 {
4544 config.AllocationSizes.push_back({1, 16, 16});
4545 config.AllocationSizes.push_back({1, 64, 64});
4546 config.AllocationSizes.push_back({1, 256, 256});
4547 config.AllocationSizes.push_back({1, 1024, 1024});
4548 }
4549 }
4550 // Large
4551 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4552 {
4553 // Varying size
4554 if(constantSizesIndex == 0)
4555 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4556 // Constant sizes
4557 else
4558 {
4559 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4560 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4561 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4562 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4563 }
4564 }
4565 }
4566 // Images present
4567 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4568 {
4569 // Small
4570 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4571 {
4572 // Varying size
4573 if(constantSizesIndex == 0)
4574 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4575 // Constant sizes
4576 else
4577 {
4578 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4579 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4580 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4581 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4582 }
4583 }
4584 // Large
4585 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4586 {
4587 // Varying size
4588 if(constantSizesIndex == 0)
4589 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4590 // Constant sizes
4591 else
4592 {
4593 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4594 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4595 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4596 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4597 }
4598 }
4599 }
4600
4601                     // 0 = allocate 100% up front (no additional operations); 1 = 50%, 2 = 5%, 3 = 95% up front, each followed by many additional operations
4602 size_t beginBytesToAllocateCount = 1;
4603 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4604 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4605 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4606 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4607 {
4608 std::string desc5 = desc4;
4609
4610 switch(beginBytesToAllocateIndex)
4611 {
4612 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004613 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004614 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4615 config.AdditionalOperationCount = 0;
4616 break;
4617 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004618 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004619 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4620 config.AdditionalOperationCount = 1024;
4621 break;
4622 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004623 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004624 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4625 config.AdditionalOperationCount = 1024;
4626 break;
4627 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004628 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004629 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4630 config.AdditionalOperationCount = 1024;
4631 break;
4632 default:
4633 assert(0);
4634 }
4635
Adam Sawicki0667e332018-08-24 17:26:44 +02004636 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004637 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004638 std::string desc6 = desc5;
4639 switch(strategyIndex)
4640 {
4641 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004642 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004643 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4644 break;
4645 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004646 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004647 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4648 break;
4649 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004650 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004651 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4652 break;
4653 default:
4654 assert(0);
4655 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004656
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004657 desc6 += ',';
4658 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004659
4660 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004661
4662 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4663 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004664 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004665
4666 Result result{};
4667 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004668 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004669 if(file)
4670 {
4671 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4672 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004673 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004674 }
4675 }
4676 }
4677 }
4678 }
4679 }
4680}
4681
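// Benchmark matrix for custom pools: iterates over thread count, buffers vs. images,
// small vs. large sizes, varying vs. constant sizes, and pool subscription (how many
// items are kept in use relative to the pool capacity), writing one CSV row per run.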
4682static void PerformPoolTests(FILE* file)
4683{
4684 const size_t AVG_RESOURCES_PER_POOL = 300;
4685
4686 uint32_t repeatCount = 1;
4687 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4688
4689 PoolTestConfig config{};
4690 config.RandSeed = 2346343;
4691 config.FrameCount = 200;
4692 config.ItemsToMakeUnusedPercent = 2;
4693
4694 size_t threadCountCount = 1;
4695 switch(ConfigType)
4696 {
4697 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4698 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4699 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4700 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4701 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4702 default: assert(0);
4703 }
4704 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4705 {
4706 std::string desc1;
4707
4708 switch(threadCountIndex)
4709 {
4710 case 0:
4711 desc1 += "1_thread";
4712 config.ThreadCount = 1;
4713 break;
4714 case 1:
4715 desc1 += "16_threads";
4716 config.ThreadCount = 16;
4717 break;
4718 case 2:
4719 desc1 += "2_threads";
4720 config.ThreadCount = 2;
4721 break;
4722 default:
4723 assert(0);
4724 }
4725
4726 // 0 = buffers, 1 = images, 2 = buffers and images
4727 size_t buffersVsImagesCount = 2;
4728 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4729 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4730 {
4731 std::string desc2 = desc1;
4732 switch(buffersVsImagesIndex)
4733 {
4734 case 0: desc2 += " Buffers"; break;
4735 case 1: desc2 += " Images"; break;
4736 case 2: desc2 += " Buffers+Images"; break;
4737 default: assert(0);
4738 }
4739
4740 // 0 = small, 1 = large, 2 = small and large
4741 size_t smallVsLargeCount = 2;
4742 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4743 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4744 {
4745 std::string desc3 = desc2;
4746 switch(smallVsLargeIndex)
4747 {
4748 case 0: desc3 += " Small"; break;
4749 case 1: desc3 += " Large"; break;
4750 case 2: desc3 += " Small+Large"; break;
4751 default: assert(0);
4752 }
4753
4754 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4755 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4756 else
4757 config.PoolSize = 4ull * 1024 * 1024;
4758
4759 // 0 = varying sizes min...max, 1 = set of constant sizes
4760 size_t constantSizesCount = 1;
4761 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4762 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4763 {
4764 std::string desc4 = desc3;
4765 switch(constantSizesIndex)
4766 {
4767 case 0: desc4 += " Varying_sizes"; break;
4768 case 1: desc4 += " Constant_sizes"; break;
4769 default: assert(0);
4770 }
4771
4772 config.AllocationSizes.clear();
4773 // Buffers present
4774 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4775 {
4776 // Small
4777 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4778 {
4779 // Varying size
4780 if(constantSizesIndex == 0)
4781 config.AllocationSizes.push_back({4, 16, 1024});
4782 // Constant sizes
4783 else
4784 {
4785 config.AllocationSizes.push_back({1, 16, 16});
4786 config.AllocationSizes.push_back({1, 64, 64});
4787 config.AllocationSizes.push_back({1, 256, 256});
4788 config.AllocationSizes.push_back({1, 1024, 1024});
4789 }
4790 }
4791 // Large
4792 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4793 {
4794 // Varying size
4795 if(constantSizesIndex == 0)
4796 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4797 // Constant sizes
4798 else
4799 {
4800 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4801 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4802 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4803 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4804 }
4805 }
4806 }
4807 // Images present
4808 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4809 {
4810 // Small
4811 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4812 {
4813 // Varying size
4814 if(constantSizesIndex == 0)
4815 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4816 // Constant sizes
4817 else
4818 {
4819 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4820 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4821 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4822 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4823 }
4824 }
4825 // Large
4826 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4827 {
4828 // Varying size
4829 if(constantSizesIndex == 0)
4830 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4831 // Constant sizes
4832 else
4833 {
4834 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4835 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4836 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4837 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4838 }
4839 }
4840 }
4841
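                    // Size the pool for ~AVG_RESOURCES_PER_POOL average-sized resources;
                    // the subscription modes below then keep 33-166% of that amount in use.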
4842 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4843 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4844
4845                     // Subscription = UsedItemCountMax as a percentage of AVG_RESOURCES_PER_POOL: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4846 size_t subscriptionModeCount;
4847 switch(ConfigType)
4848 {
4849 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4850 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4851 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4852 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4853 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4854 default: assert(0);
4855 }
4856 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4857 {
4858 std::string desc5 = desc4;
4859
4860 switch(subscriptionModeIndex)
4861 {
4862 case 0:
4863 desc5 += " Subscription_66%";
4864 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4865 break;
4866 case 1:
4867 desc5 += " Subscription_133%";
4868 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4869 break;
4870 case 2:
4871 desc5 += " Subscription_100%";
4872 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4873 break;
4874 case 3:
4875 desc5 += " Subscription_33%";
4876 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4877 break;
4878 case 4:
4879 desc5 += " Subscription_166%";
4880 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4881 break;
4882 default:
4883 assert(0);
4884 }
4885
4886 config.TotalItemCount = config.UsedItemCountMax * 5;
4887 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4888
4889 const char* testDescription = desc5.c_str();
4890
4891 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4892 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004893 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004894
4895 PoolTestResult result{};
4896 g_MemoryAliasingWarningEnabled = false;
4897 TestPool_Benchmark(result, config);
4898 g_MemoryAliasingWarningEnabled = true;
4899 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4900 }
4901 }
4902 }
4903 }
4904 }
4905 }
4906}
4907
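// Smoke test for a custom pool created with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
// allocates buffers of assorted sizes (including a 1-byte one and one with an explicit
// alignment requirement), dumps stats to BuddyTest01.json, then frees everything in random order.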
Adam Sawickia83793a2018-09-03 13:40:42 +02004908static void BasicTestBuddyAllocator()
4909{
4910 wprintf(L"Basic test buddy allocator\n");
4911
4912 RandomNumberGenerator rand{76543};
4913
4914 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4915     sampleBufCreateInfo.size = 1024; // Arbitrary - used only to find a compatible memory type below.
4916 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4917
4918 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4919 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4920
4921 VmaPoolCreateInfo poolCreateInfo = {};
4922 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004923 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004924
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004925     // Deliberately add 1023 so the usable size (which the buddy algorithm rounds down to a power of 2) is smaller than the memory block size.
4926 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004927 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004928 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004929
4930 VmaPool pool = nullptr;
4931 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004932 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004933
4934 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4935
4936 VmaAllocationCreateInfo allocCreateInfo = {};
4937 allocCreateInfo.pool = pool;
4938
4939 std::vector<BufferInfo> bufInfo;
4940 BufferInfo newBufInfo;
4941 VmaAllocationInfo allocInfo;
4942
4943 bufCreateInfo.size = 1024 * 256;
4944 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4945 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004946 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004947 bufInfo.push_back(newBufInfo);
4948
4949 bufCreateInfo.size = 1024 * 512;
4950 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4951 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004952 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004953 bufInfo.push_back(newBufInfo);
4954
4955 bufCreateInfo.size = 1024 * 128;
4956 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4957 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004958 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004959 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004960
4961     // Test a very small allocation, smaller than the minimum node size.
4962 bufCreateInfo.size = 1;
4963 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4964 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004965 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004966 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004967
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004968     // Test a small allocation with an explicit alignment requirement.
4969 {
4970 VkMemoryRequirements memReq;
4971 memReq.alignment = 256;
4972 memReq.memoryTypeBits = UINT32_MAX;
4973 memReq.size = 32;
4974
4975 newBufInfo.Buffer = VK_NULL_HANDLE;
4976 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4977 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004978 TEST(res == VK_SUCCESS);
4979 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004980 bufInfo.push_back(newBufInfo);
4981 }
4982
4983 //SaveAllocatorStatsToFile(L"TEST.json");
4984
Adam Sawicki21017c62018-09-07 15:26:59 +02004985 VmaPoolStats stats = {};
4986 vmaGetPoolStats(g_hAllocator, pool, &stats);
4987 int DBG = 0; // Set breakpoint here to inspect `stats`.
4988
Adam Sawicki80927152018-09-07 17:27:23 +02004989     // Allocate enough new buffers to be sure they spill over into a second block.
4990 for(uint32_t i = 0; i < 32; ++i)
4991 {
4992 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4993 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4994 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004995 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004996 bufInfo.push_back(newBufInfo);
4997 }
4998
4999 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5000
Adam Sawickia83793a2018-09-03 13:40:42 +02005001 // Destroy the buffers in random order.
5002 while(!bufInfo.empty())
5003 {
5004 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5005 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5006 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5007 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5008 }
5009
5010 vmaDestroyPool(g_hAllocator, pool);
5011}
5012
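// Smoke test for vmaAllocateMemoryPages()/vmaFreeMemoryPages(): allocates batches of
// allocations in a single call, both from a fixed-size pool and as dedicated allocations,
// and checks the expected failure cases.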
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005013static void BasicTestAllocatePages()
5014{
5015 wprintf(L"Basic test allocate pages\n");
5016
5017 RandomNumberGenerator rand{765461};
5018
5019 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5020     sampleBufCreateInfo.size = 1024; // Arbitrary - used only to find a compatible memory type below.
5021 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5022
5023 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5024 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5025
5026 VmaPoolCreateInfo poolCreateInfo = {};
5027 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005028 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005029
5030 // 1 block of 1 MB.
5031 poolCreateInfo.blockSize = 1024 * 1024;
5032 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5033
5034 // Create pool.
5035 VmaPool pool = nullptr;
5036 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005037 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005038
5039 // Make 100 allocations of 4 KB - they should fit into the pool.
5040 VkMemoryRequirements memReq;
5041 memReq.memoryTypeBits = UINT32_MAX;
5042 memReq.alignment = 4 * 1024;
5043 memReq.size = 4 * 1024;
5044
5045 VmaAllocationCreateInfo allocCreateInfo = {};
5046 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5047 allocCreateInfo.pool = pool;
5048
5049 constexpr uint32_t allocCount = 100;
5050
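    // vmaAllocateMemoryPages() is all-or-nothing: if any page cannot be allocated, the
    // pages allocated so far are freed, the call fails, and the output array is left
    // filled with VK_NULL_HANDLE (checked below for the 100 KB failure case).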
5051 std::vector<VmaAllocation> alloc{allocCount};
5052 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5053 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005054 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005055 for(uint32_t i = 0; i < allocCount; ++i)
5056 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005057 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005058 allocInfo[i].pMappedData != nullptr &&
5059 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5060 allocInfo[i].memoryType == allocInfo[0].memoryType);
5061 }
5062
5063 // Free the allocations.
5064 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5065 std::fill(alloc.begin(), alloc.end(), nullptr);
5066 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5067
5068 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5069 // Also test optional allocationInfo = null.
5070 memReq.size = 100 * 1024;
5071 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005072 TEST(res != VK_SUCCESS);
5073 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005074
5075 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5076 memReq.size = 4 * 1024;
5077 memReq.alignment = 128 * 1024;
5078 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005079 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005080
5081 // Make 100 dedicated allocations of 4 KB.
5082 memReq.alignment = 4 * 1024;
5083 memReq.size = 4 * 1024;
5084
5085 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5086 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5087 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5088 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005089 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005090 for(uint32_t i = 0; i < allocCount; ++i)
5091 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005092 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005093 allocInfo[i].pMappedData != nullptr &&
5094 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5095 allocInfo[i].offset == 0);
5096 if(i > 0)
5097 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005098 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005099 }
5100 }
5101
5102 // Free the allocations.
5103 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5104 std::fill(alloc.begin(), alloc.end(), nullptr);
5105 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5106
5107 vmaDestroyPool(g_hAllocator, pool);
5108}
5109
Adam Sawickif2975342018-10-16 13:49:02 +02005110// Test the testing environment.
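// Creates 100 GPU_ONLY buffers of random size, fills them with pseudo-random data and
// reads it back for verification (see UploadGpuData()/ValidateGpuData()).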
5111static void TestGpuData()
5112{
5113 RandomNumberGenerator rand = { 53434 };
5114
5115 std::vector<AllocInfo> allocInfo;
5116
5117 for(size_t i = 0; i < 100; ++i)
5118 {
5119 AllocInfo info = {};
5120
5121 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5122 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5123 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5124 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5125 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5126
5127 VmaAllocationCreateInfo allocCreateInfo = {};
5128 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5129
5130 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5131 TEST(res == VK_SUCCESS);
5132
5133 info.m_StartValue = rand.Generate();
5134
5135 allocInfo.push_back(std::move(info));
5136 }
5137
5138 UploadGpuData(allocInfo.data(), allocInfo.size());
5139
5140 ValidateGpuData(allocInfo.data(), allocInfo.size());
5141
5142 DestroyAllAllocations(allocInfo);
5143}
5144
Adam Sawickib8333fb2018-03-13 16:15:53 +01005145void Test()
5146{
5147 wprintf(L"TESTING:\n");
5148
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005149 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005150 {
5151 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005152 // ########################################
5153 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02005154
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005155 TestDefragmentationSimple();
5156 TestDefragmentationFull();
5157 TestDefragmentationGpu();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005158 TestDefragmentationWholePool();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005159 return;
5160 }
5161
Adam Sawickib8333fb2018-03-13 16:15:53 +01005162 // # Simple tests
5163
5164 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005165 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005166#if VMA_DEBUG_MARGIN
5167 TestDebugMargin();
5168#else
5169 TestPool_SameSize();
5170 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005171 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005172#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005173#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5174 TestAllocationsInitialization();
5175#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005176 TestMapping();
5177 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005178 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005179 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005180 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005181
Adam Sawicki4338f662018-09-07 14:12:37 +02005182 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005183 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005184
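    // Benchmark the different allocation algorithms and dump the results to Algorithms.csv.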
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005185 {
5186 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005187 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005188 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005189 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005190 fclose(file);
5191 }
5192
Adam Sawickib8333fb2018-03-13 16:15:53 +01005193 TestDefragmentationSimple();
5194 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005195 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005196 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005197
5198 // # Detailed tests
5199 FILE* file;
5200 fopen_s(&file, "Results.csv", "w");
5201 assert(file != NULL);
5202
5203 WriteMainTestResultHeader(file);
5204 PerformMainTests(file);
5205 //PerformCustomMainTest(file);
5206
5207 WritePoolTestResultHeader(file);
5208 PerformPoolTests(file);
5209 //PerformCustomPoolTest(file);
5210
5211 fclose(file);
5212
5213 wprintf(L"Done.\n");
5214}
5215
Adam Sawickif1a793c2018-03-13 15:42:22 +01005216#endif // #ifdef _WIN32