#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

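// One size class used by the tests: a probability weight plus either a buffer size range or an image dimension range (the unused range is left at zero).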
Adam Sawickib8333fb2018-03-13 16:15:53 +010053struct AllocationSize
54{
55 uint32_t Probability;
56 VkDeviceSize BufferSizeMin, BufferSizeMax;
57 uint32_t ImageSizeMin, ImageSizeMax;
58};
59
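// Parameters of one MainTest() run: allocation budgets, additional operation count, thread count, memory usage and allocation size distributions, free order, and allocation strategy.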
60struct Config
61{
62 uint32_t RandSeed;
63 VkDeviceSize BeginBytesToAllocate;
64 uint32_t AdditionalOperationCount;
65 VkDeviceSize MaxBytesToAllocate;
66 uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
67 std::vector<AllocationSize> AllocationSizes;
68 uint32_t ThreadCount;
69 uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
70 FREE_ORDER FreeOrder;
Adam Sawicki0667e332018-08-24 17:26:44 +020071 VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
Adam Sawickib8333fb2018-03-13 16:15:53 +010072};
73
74struct Result
75{
76 duration TotalTime;
77 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
78 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
79 VkDeviceSize TotalMemoryAllocated;
80 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
81};
82
83void TestDefragmentationSimple();
84void TestDefragmentationFull();
85
86struct PoolTestConfig
87{
88 uint32_t RandSeed;
89 uint32_t ThreadCount;
90 VkDeviceSize PoolSize;
91 uint32_t FrameCount;
92 uint32_t TotalItemCount;
93 // Range for number of items used in each frame.
94 uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused (and possibly make some others used) in each frame.
96 uint32_t ItemsToMakeUnusedPercent;
97 std::vector<AllocationSize> AllocationSizes;
98
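    // Probability-weighted average resource size; an image is approximated as width * height * 4 bytes.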
99 VkDeviceSize CalcAvgResourceSize() const
100 {
101 uint32_t probabilitySum = 0;
102 VkDeviceSize sizeSum = 0;
103 for(size_t i = 0; i < AllocationSizes.size(); ++i)
104 {
105 const AllocationSize& allocSize = AllocationSizes[i];
106 if(allocSize.BufferSizeMax > 0)
107 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
108 else
109 {
110 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
111 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
112 }
113 probabilitySum += allocSize.Probability;
114 }
115 return sizeSum / probabilitySum;
116 }
117
118 bool UsesBuffers() const
119 {
120 for(size_t i = 0; i < AllocationSizes.size(); ++i)
121 if(AllocationSizes[i].BufferSizeMax > 0)
122 return true;
123 return false;
124 }
125
126 bool UsesImages() const
127 {
128 for(size_t i = 0; i < AllocationSizes.size(); ++i)
129 if(AllocationSizes[i].ImageSizeMax > 0)
130 return true;
131 return false;
132 }
133};
134
135struct PoolTestResult
136{
137 duration TotalTime;
138 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
139 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
140 size_t LostAllocationCount, LostAllocationTotalSize;
141 size_t FailedAllocationCount, FailedAllocationTotalSize;
142};
143
144static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
145
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200146static uint32_t g_FrameIndex = 0;
147
Adam Sawickib8333fb2018-03-13 16:15:53 +0100148struct BufferInfo
149{
150 VkBuffer Buffer = VK_NULL_HANDLE;
151 VmaAllocation Allocation = VK_NULL_HANDLE;
152};
153
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200154static uint32_t GetAllocationStrategyCount()
155{
156 uint32_t strategyCount = 0;
157 switch(ConfigType)
158 {
159 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
160 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
161 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
162 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
163 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
164 default: assert(0);
165 }
166 return strategyCount;
167}
168
169static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
170{
171 switch(allocStrategy)
172 {
173 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
174 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
175 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
176 case 0: return "Default"; break;
177 default: assert(0); return "";
178 }
179}
180
Adam Sawickib8333fb2018-03-13 16:15:53 +0100181static void InitResult(Result& outResult)
182{
183 outResult.TotalTime = duration::zero();
184 outResult.AllocationTimeMin = duration::max();
185 outResult.AllocationTimeAvg = duration::zero();
186 outResult.AllocationTimeMax = duration::min();
187 outResult.DeallocationTimeMin = duration::max();
188 outResult.DeallocationTimeAvg = duration::zero();
189 outResult.DeallocationTimeMax = duration::min();
190 outResult.TotalMemoryAllocated = 0;
191 outResult.FreeRangeSizeAvg = 0;
192 outResult.FreeRangeSizeMax = 0;
193}
194
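// RAII timer: measures the interval from construction to destruction and accumulates it into the referenced min/sum/max durations.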
195class TimeRegisterObj
196{
197public:
198 TimeRegisterObj(duration& min, duration& sum, duration& max) :
199 m_Min(min),
200 m_Sum(sum),
201 m_Max(max),
202 m_TimeBeg(std::chrono::high_resolution_clock::now())
203 {
204 }
205
206 ~TimeRegisterObj()
207 {
208 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
209 m_Sum += d;
210 if(d < m_Min) m_Min = d;
211 if(d > m_Max) m_Max = d;
212 }
213
214private:
215 duration& m_Min;
216 duration& m_Sum;
217 duration& m_Max;
218 time_point m_TimeBeg;
219};
220
221struct PoolTestThreadResult
222{
223 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
224 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
225 size_t AllocationCount, DeallocationCount;
226 size_t LostAllocationCount, LostAllocationTotalSize;
227 size_t FailedAllocationCount, FailedAllocationTotalSize;
228};
229
230class AllocationTimeRegisterObj : public TimeRegisterObj
231{
232public:
233 AllocationTimeRegisterObj(Result& result) :
234 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
235 {
236 }
237};
238
239class DeallocationTimeRegisterObj : public TimeRegisterObj
240{
241public:
242 DeallocationTimeRegisterObj(Result& result) :
243 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
244 {
245 }
246};
247
248class PoolAllocationTimeRegisterObj : public TimeRegisterObj
249{
250public:
251 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
252 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
253 {
254 }
255};
256
257class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
258{
259public:
260 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
261 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
262 {
263 }
264};
265
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200266static void CurrentTimeToStr(std::string& out)
267{
268 time_t rawTime; time(&rawTime);
269 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
270 char timeStr[128];
271 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
272 out = timeStr;
273}
274
Adam Sawickib8333fb2018-03-13 16:15:53 +0100275VkResult MainTest(Result& outResult, const Config& config)
276{
277 assert(config.ThreadCount > 0);
278
279 InitResult(outResult);
280
281 RandomNumberGenerator mainRand{config.RandSeed};
282
283 time_point timeBeg = std::chrono::high_resolution_clock::now();
284
285 std::atomic<size_t> allocationCount = 0;
286 VkResult res = VK_SUCCESS;
287
288 uint32_t memUsageProbabilitySum =
289 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
290 config.MemUsageProbability[2] + config.MemUsageProbability[3];
291 assert(memUsageProbabilitySum > 0);
292
293 uint32_t allocationSizeProbabilitySum = std::accumulate(
294 config.AllocationSizes.begin(),
295 config.AllocationSizes.end(),
296 0u,
297 [](uint32_t sum, const AllocationSize& allocSize) {
298 return sum + allocSize.Probability;
299 });
300
301 struct Allocation
302 {
303 VkBuffer Buffer;
304 VkImage Image;
305 VmaAllocation Alloc;
306 };
307
308 std::vector<Allocation> commonAllocations;
309 std::mutex commonAllocationsMutex;
310
311 auto Allocate = [&](
312 VkDeviceSize bufferSize,
313 const VkExtent2D imageExtent,
314 RandomNumberGenerator& localRand,
315 VkDeviceSize& totalAllocatedBytes,
316 std::vector<Allocation>& allocations) -> VkResult
317 {
318 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
319
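        // Pick one of the four VMA_MEMORY_USAGE_* values at random, weighted by config.MemUsageProbability.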
320 uint32_t memUsageIndex = 0;
321 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
322 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
323 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
324
325 VmaAllocationCreateInfo memReq = {};
326 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200327 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100328
329 Allocation allocation = {};
330 VmaAllocationInfo allocationInfo;
331
332 // Buffer
333 if(bufferSize > 0)
334 {
335 assert(imageExtent.width == 0);
336 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
337 bufferInfo.size = bufferSize;
338 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
339
340 {
341 AllocationTimeRegisterObj timeRegisterObj{outResult};
342 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
343 }
344 }
345 // Image
346 else
347 {
348 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
349 imageInfo.imageType = VK_IMAGE_TYPE_2D;
350 imageInfo.extent.width = imageExtent.width;
351 imageInfo.extent.height = imageExtent.height;
352 imageInfo.extent.depth = 1;
353 imageInfo.mipLevels = 1;
354 imageInfo.arrayLayers = 1;
355 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
356 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
357 VK_IMAGE_TILING_OPTIMAL :
358 VK_IMAGE_TILING_LINEAR;
359 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
360 switch(memReq.usage)
361 {
362 case VMA_MEMORY_USAGE_GPU_ONLY:
363 switch(localRand.Generate() % 3)
364 {
365 case 0:
366 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
367 break;
368 case 1:
369 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
370 break;
371 case 2:
372 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
373 break;
374 }
375 break;
376 case VMA_MEMORY_USAGE_CPU_ONLY:
377 case VMA_MEMORY_USAGE_CPU_TO_GPU:
378 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
379 break;
380 case VMA_MEMORY_USAGE_GPU_TO_CPU:
381 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
382 break;
383 }
384 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
385 imageInfo.flags = 0;
386
387 {
388 AllocationTimeRegisterObj timeRegisterObj{outResult};
389 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
390 }
391 }
392
393 if(res == VK_SUCCESS)
394 {
395 ++allocationCount;
396 totalAllocatedBytes += allocationInfo.size;
397 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
398 if(useCommonAllocations)
399 {
400 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
401 commonAllocations.push_back(allocation);
402 }
403 else
404 allocations.push_back(allocation);
405 }
406 else
407 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200408 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100409 }
410 return res;
411 };
412
413 auto GetNextAllocationSize = [&](
414 VkDeviceSize& outBufSize,
415 VkExtent2D& outImageSize,
416 RandomNumberGenerator& localRand)
417 {
418 outBufSize = 0;
419 outImageSize = {0, 0};
420
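        // Pick an AllocationSize bucket at random, weighted by Probability, then randomize the exact buffer or image size within its range.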
421 uint32_t allocSizeIndex = 0;
422 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
423 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
424 r -= config.AllocationSizes[allocSizeIndex++].Probability;
425
426 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
427 if(allocSize.BufferSizeMax > 0)
428 {
429 assert(allocSize.ImageSizeMax == 0);
430 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
431 outBufSize = allocSize.BufferSizeMin;
432 else
433 {
434 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
435 outBufSize = outBufSize / 16 * 16;
436 }
437 }
438 else
439 {
440 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
441 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
442 else
443 {
444 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
445 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
446 }
447 }
448 };
449
450 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
451 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
452
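    // Each worker thread allocates up to its share of the budget, increments numThreadsReachedMaxAllocations,
    // then blocks on threadsFinishEvent so the main thread can capture memory statistics before deallocation starts.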
453 auto ThreadProc = [&](uint32_t randSeed) -> void
454 {
455 RandomNumberGenerator threadRand(randSeed);
456 VkDeviceSize threadTotalAllocatedBytes = 0;
457 std::vector<Allocation> threadAllocations;
458 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
459 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
460 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
461
462 // BEGIN ALLOCATIONS
463 for(;;)
464 {
465 VkDeviceSize bufferSize = 0;
466 VkExtent2D imageExtent = {};
467 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
468 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
469 threadBeginBytesToAllocate)
470 {
471 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
472 break;
473 }
474 else
475 break;
476 }
477
478 // ADDITIONAL ALLOCATIONS AND FREES
479 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
480 {
481 VkDeviceSize bufferSize = 0;
482 VkExtent2D imageExtent = {};
483 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
484
485 // true = allocate, false = free
486 bool allocate = threadRand.Generate() % 2 != 0;
487
488 if(allocate)
489 {
490 if(threadTotalAllocatedBytes +
491 bufferSize +
492 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
493 threadMaxBytesToAllocate)
494 {
495 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
496 break;
497 }
498 }
499 else
500 {
501 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
502 if(useCommonAllocations)
503 {
504 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
505 if(!commonAllocations.empty())
506 {
507 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
508 VmaAllocationInfo allocationInfo;
509 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
510 if(threadTotalAllocatedBytes >= allocationInfo.size)
511 {
512 DeallocationTimeRegisterObj timeRegisterObj{outResult};
513 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
514 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
515 else
516 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
517 threadTotalAllocatedBytes -= allocationInfo.size;
518 commonAllocations.erase(commonAllocations.begin() + indexToFree);
519 }
520 }
521 }
522 else
523 {
524 if(!threadAllocations.empty())
525 {
526 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
527 VmaAllocationInfo allocationInfo;
528 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
529 if(threadTotalAllocatedBytes >= allocationInfo.size)
530 {
531 DeallocationTimeRegisterObj timeRegisterObj{outResult};
532 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
533 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
534 else
535 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
536 threadTotalAllocatedBytes -= allocationInfo.size;
537 threadAllocations.erase(threadAllocations.begin() + indexToFree);
538 }
539 }
540 }
541 }
542 }
543
544 ++numThreadsReachedMaxAllocations;
545
546 WaitForSingleObject(threadsFinishEvent, INFINITE);
547
548 // DEALLOCATION
549 while(!threadAllocations.empty())
550 {
551 size_t indexToFree = 0;
552 switch(config.FreeOrder)
553 {
554 case FREE_ORDER::FORWARD:
555 indexToFree = 0;
556 break;
557 case FREE_ORDER::BACKWARD:
558 indexToFree = threadAllocations.size() - 1;
559 break;
560 case FREE_ORDER::RANDOM:
561 indexToFree = mainRand.Generate() % threadAllocations.size();
562 break;
563 }
564
565 {
566 DeallocationTimeRegisterObj timeRegisterObj{outResult};
567 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
568 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
569 else
570 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
571 }
572 threadAllocations.erase(threadAllocations.begin() + indexToFree);
573 }
574 };
575
576 uint32_t threadRandSeed = mainRand.Generate();
577 std::vector<std::thread> bkgThreads;
578 for(size_t i = 0; i < config.ThreadCount; ++i)
579 {
580 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
581 }
582
    // Wait until all threads have finished allocating.
584 while(numThreadsReachedMaxAllocations < config.ThreadCount)
585 Sleep(0);
586
587 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
588 VmaStats vmaStats = {};
589 vmaCalculateStats(g_hAllocator, &vmaStats);
590 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
591 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
592 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
593
594 // Signal threads to deallocate
595 SetEvent(threadsFinishEvent);
596
    // Wait for the threads to finish.
598 for(size_t i = 0; i < bkgThreads.size(); ++i)
599 bkgThreads[i].join();
600 bkgThreads.clear();
601
602 CloseHandle(threadsFinishEvent);
603
604 // Deallocate remaining common resources
605 while(!commonAllocations.empty())
606 {
607 size_t indexToFree = 0;
608 switch(config.FreeOrder)
609 {
610 case FREE_ORDER::FORWARD:
611 indexToFree = 0;
612 break;
613 case FREE_ORDER::BACKWARD:
614 indexToFree = commonAllocations.size() - 1;
615 break;
616 case FREE_ORDER::RANDOM:
617 indexToFree = mainRand.Generate() % commonAllocations.size();
618 break;
619 }
620
621 {
622 DeallocationTimeRegisterObj timeRegisterObj{outResult};
623 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
624 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
625 else
626 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
627 }
628 commonAllocations.erase(commonAllocations.begin() + indexToFree);
629 }
630
631 if(allocationCount)
632 {
633 outResult.AllocationTimeAvg /= allocationCount;
634 outResult.DeallocationTimeAvg /= allocationCount;
635 }
636
637 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
638
639 return res;
640}
641
static void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}
649
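// A test resource: a buffer or image together with its VmaAllocation and the seed value of the data pattern written into it.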
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};
667
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200668void AllocInfo::CreateBuffer(
669 const VkBufferCreateInfo& bufCreateInfo,
670 const VmaAllocationCreateInfo& allocCreateInfo)
671{
672 m_BufferInfo = bufCreateInfo;
673 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
674 TEST(res == VK_SUCCESS);
675}
676
677void AllocInfo::Destroy()
678{
679 if(m_Image)
680 {
681 vkDestroyImage(g_hDevice, m_Image, nullptr);
682 }
683 if(m_Buffer)
684 {
685 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
686 }
687 if(m_Allocation)
688 {
689 vmaFreeMemory(g_hAllocator, m_Allocation);
690 }
691}
692
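// Reusable set of persistently mapped, host-visible staging buffers, capped at MAX_TOTAL_SIZE in total.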
Adam Sawickif2975342018-10-16 13:49:02 +0200693class StagingBufferCollection
694{
695public:
696 StagingBufferCollection() { }
697 ~StagingBufferCollection();
698 // Returns false if maximum total size of buffers would be exceeded.
699 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
700 void ReleaseAllBuffers();
701
702private:
703 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
704 struct BufInfo
705 {
706 VmaAllocation Allocation = VK_NULL_HANDLE;
707 VkBuffer Buffer = VK_NULL_HANDLE;
708 VkDeviceSize Size = VK_WHOLE_SIZE;
709 void* MappedPtr = nullptr;
710 bool Used = false;
711 };
712 std::vector<BufInfo> m_Bufs;
713 // Including both used and unused.
714 VkDeviceSize m_TotalSize = 0;
715};
716
717StagingBufferCollection::~StagingBufferCollection()
718{
719 for(size_t i = m_Bufs.size(); i--; )
720 {
721 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
722 }
723}
724
725bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
726{
727 assert(size <= MAX_TOTAL_SIZE);
728
729 // Try to find existing unused buffer with best size.
730 size_t bestIndex = SIZE_MAX;
731 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
732 {
733 BufInfo& currBufInfo = m_Bufs[i];
734 if(!currBufInfo.Used && currBufInfo.Size >= size &&
735 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
736 {
737 bestIndex = i;
738 }
739 }
740
741 if(bestIndex != SIZE_MAX)
742 {
743 m_Bufs[bestIndex].Used = true;
744 outBuffer = m_Bufs[bestIndex].Buffer;
745 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
746 return true;
747 }
748
749 // Allocate new buffer with requested size.
750 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
751 {
752 BufInfo bufInfo;
753 bufInfo.Size = size;
754 bufInfo.Used = true;
755
756 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
757 bufCreateInfo.size = size;
758 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
759
760 VmaAllocationCreateInfo allocCreateInfo = {};
761 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
762 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
763
764 VmaAllocationInfo allocInfo;
765 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
766 bufInfo.MappedPtr = allocInfo.pMappedData;
767 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
768
769 outBuffer = bufInfo.Buffer;
770 outMappedPtr = bufInfo.MappedPtr;
771
772 m_Bufs.push_back(std::move(bufInfo));
773
774 m_TotalSize += size;
775
776 return true;
777 }
778
    // Any unused buffers left at this point are too small: free them and try again.
780 bool hasUnused = false;
781 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
782 {
783 if(!m_Bufs[i].Used)
784 {
785 hasUnused = true;
786 break;
787 }
788 }
789 if(hasUnused)
790 {
791 for(size_t i = m_Bufs.size(); i--; )
792 {
793 if(!m_Bufs[i].Used)
794 {
795 m_TotalSize -= m_Bufs[i].Size;
796 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
797 m_Bufs.erase(m_Bufs.begin() + i);
798 }
799 }
800
801 return AcquireBuffer(size, outBuffer, outMappedPtr);
802 }
803
804 return false;
805}
806
807void StagingBufferCollection::ReleaseAllBuffers()
808{
809 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
810 {
811 m_Bufs[i].Used = false;
812 }
813}
814
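// Fills each destination buffer with a sequential uint32_t pattern starting at m_StartValue, copying through
// staging buffers and flushing the command buffer whenever staging space runs out.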
815static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
816{
817 StagingBufferCollection stagingBufs;
818
819 bool cmdBufferStarted = false;
820 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
821 {
822 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
823 if(currAllocInfo.m_Buffer)
824 {
825 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
826
827 VkBuffer stagingBuf = VK_NULL_HANDLE;
828 void* stagingBufMappedPtr = nullptr;
829 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
830 {
831 TEST(cmdBufferStarted);
832 EndSingleTimeCommands();
833 stagingBufs.ReleaseAllBuffers();
834 cmdBufferStarted = false;
835
836 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
837 TEST(ok);
838 }
839
840 // Fill staging buffer.
841 {
842 assert(size % sizeof(uint32_t) == 0);
843 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
844 uint32_t val = currAllocInfo.m_StartValue;
845 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
846 {
847 *stagingValPtr = val;
848 ++stagingValPtr;
849 ++val;
850 }
851 }
852
853 // Issue copy command from staging buffer to destination buffer.
854 if(!cmdBufferStarted)
855 {
856 cmdBufferStarted = true;
857 BeginSingleTimeCommands();
858 }
859
860 VkBufferCopy copy = {};
861 copy.srcOffset = 0;
862 copy.dstOffset = 0;
863 copy.size = size;
864 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
865 }
866 else
867 {
868 TEST(0 && "Images not currently supported.");
869 }
870 }
871
872 if(cmdBufferStarted)
873 {
874 EndSingleTimeCommands();
875 stagingBufs.ReleaseAllBuffers();
876 }
877}
878
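// Copies each buffer back into staging memory and verifies the sequential pattern written by UploadGpuData.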
879static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
880{
881 StagingBufferCollection stagingBufs;
882
883 bool cmdBufferStarted = false;
884 size_t validateAllocIndexOffset = 0;
885 std::vector<void*> validateStagingBuffers;
886 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
887 {
888 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
889 if(currAllocInfo.m_Buffer)
890 {
891 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
892
893 VkBuffer stagingBuf = VK_NULL_HANDLE;
894 void* stagingBufMappedPtr = nullptr;
895 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
896 {
897 TEST(cmdBufferStarted);
898 EndSingleTimeCommands();
899 cmdBufferStarted = false;
900
901 for(size_t validateIndex = 0;
902 validateIndex < validateStagingBuffers.size();
903 ++validateIndex)
904 {
905 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
906 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
907 TEST(validateSize % sizeof(uint32_t) == 0);
908 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
909 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
910 bool valid = true;
911 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
912 {
913 if(*stagingValPtr != val)
914 {
915 valid = false;
916 break;
917 }
918 ++stagingValPtr;
919 ++val;
920 }
921 TEST(valid);
922 }
923
924 stagingBufs.ReleaseAllBuffers();
925
926 validateAllocIndexOffset = allocInfoIndex;
927 validateStagingBuffers.clear();
928
929 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
930 TEST(ok);
931 }
932
933 // Issue copy command from staging buffer to destination buffer.
934 if(!cmdBufferStarted)
935 {
936 cmdBufferStarted = true;
937 BeginSingleTimeCommands();
938 }
939
940 VkBufferCopy copy = {};
941 copy.srcOffset = 0;
942 copy.dstOffset = 0;
943 copy.size = size;
944 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
945
            // Save mapped pointer for later validation.
947 validateStagingBuffers.push_back(stagingBufMappedPtr);
948 }
949 else
950 {
951 TEST(0 && "Images not currently supported.");
952 }
953 }
954
955 if(cmdBufferStarted)
956 {
957 EndSingleTimeCommands();
958
959 for(size_t validateIndex = 0;
960 validateIndex < validateStagingBuffers.size();
961 ++validateIndex)
962 {
963 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
964 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
965 TEST(validateSize % sizeof(uint32_t) == 0);
966 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
967 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
968 bool valid = true;
969 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
970 {
971 if(*stagingValPtr != val)
972 {
973 valid = false;
974 break;
975 }
976 ++stagingValPtr;
977 ++val;
978 }
979 TEST(valid);
980 }
981
982 stagingBufs.ReleaseAllBuffers();
983 }
984}
985
Adam Sawickib8333fb2018-03-13 16:15:53 +0100986static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
987{
988 outMemReq = {};
989 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
990 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
991}
992
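// Creates a buffer in the given pool, then fills it with a sequential pattern starting at a random m_StartValue
// (mapping and unmapping the memory unless it is persistently mapped).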
993static void CreateBuffer(
994 VmaPool pool,
995 const VkBufferCreateInfo& bufCreateInfo,
996 bool persistentlyMapped,
997 AllocInfo& outAllocInfo)
998{
999 outAllocInfo = {};
1000 outAllocInfo.m_BufferInfo = bufCreateInfo;
1001
1002 VmaAllocationCreateInfo allocCreateInfo = {};
1003 allocCreateInfo.pool = pool;
1004 if(persistentlyMapped)
1005 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1006
1007 VmaAllocationInfo vmaAllocInfo = {};
1008 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1009
1010 // Setup StartValue and fill.
1011 {
1012 outAllocInfo.m_StartValue = (uint32_t)rand();
1013 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001014 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001015 if(!persistentlyMapped)
1016 {
1017 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1018 }
1019
1020 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001021 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001022 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1023 data[i] = value++;
1024
1025 if(!persistentlyMapped)
1026 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1027 }
1028}
1029
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001030static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001031{
1032 outAllocation.m_Allocation = nullptr;
1033 outAllocation.m_Buffer = nullptr;
1034 outAllocation.m_Image = nullptr;
1035 outAllocation.m_StartValue = (uint32_t)rand();
1036
1037 VmaAllocationCreateInfo vmaMemReq;
1038 GetMemReq(vmaMemReq);
1039
1040 VmaAllocationInfo allocInfo;
1041
1042 const bool isBuffer = true;//(rand() & 0x1) != 0;
1043 const bool isLarge = (rand() % 16) == 0;
1044 if(isBuffer)
1045 {
1046 const uint32_t bufferSize = isLarge ?
1047 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1048 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1049
1050 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1051 bufferInfo.size = bufferSize;
1052 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1053
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001054 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001055 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001056 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001057 }
1058 else
1059 {
1060 const uint32_t imageSizeX = isLarge ?
1061 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1062 rand() % 1024 + 1; // 1 ... 1024
1063 const uint32_t imageSizeY = isLarge ?
1064 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1065 rand() % 1024 + 1; // 1 ... 1024
1066
1067 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1068 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1069 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1070 imageInfo.extent.width = imageSizeX;
1071 imageInfo.extent.height = imageSizeY;
1072 imageInfo.extent.depth = 1;
1073 imageInfo.mipLevels = 1;
1074 imageInfo.arrayLayers = 1;
1075 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1076 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1077 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1078 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1079
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001080 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001081 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001082 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001083 }
1084
1085 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1086 if(allocInfo.pMappedData == nullptr)
1087 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001088 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001089 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001090 }
1091
1092 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001093 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001094 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1095 data[i] = value++;
1096
1097 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001098 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001099}
1100
1101static void DestroyAllocation(const AllocInfo& allocation)
1102{
1103 if(allocation.m_Buffer)
1104 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1105 else
1106 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1107}
1108
1109static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1110{
1111 for(size_t i = allocations.size(); i--; )
1112 DestroyAllocation(allocations[i]);
1113 allocations.clear();
1114}
1115
1116static void ValidateAllocationData(const AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1122 if(allocInfo.pMappedData == nullptr)
1123 {
1124 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001126 }
1127
1128 uint32_t value = allocation.m_StartValue;
1129 bool ok = true;
1130 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001131 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001132 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1133 {
1134 if(data[i] != value++)
1135 {
1136 ok = false;
1137 break;
1138 }
1139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001140 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001141
1142 if(allocInfo.pMappedData == nullptr)
1143 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1144}
1145
1146static void RecreateAllocationResource(AllocInfo& allocation)
1147{
1148 VmaAllocationInfo allocInfo;
1149 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1150
1151 if(allocation.m_Buffer)
1152 {
1153 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);
1154
1155 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001156 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001157
1158 // Just to silence validation layer warnings.
1159 VkMemoryRequirements vkMemReq;
1160 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001161 TEST(vkMemReq.size == allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001162
1163 res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001164 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001165 }
1166 else
1167 {
1168 vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);
1169
1170 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001171 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001172
1173 // Just to silence validation layer warnings.
1174 VkMemoryRequirements vkMemReq;
1175 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1176
1177 res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001178 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001179 }
1180}
1181
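// Wraps vmaDefragment(): every allocation reported as changed gets its buffer or image recreated and rebound
// to its new memory location.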
1182static void Defragment(AllocInfo* allocs, size_t allocCount,
1183 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1184 VmaDefragmentationStats* defragmentationStats = nullptr)
1185{
1186 std::vector<VmaAllocation> vmaAllocs(allocCount);
1187 for(size_t i = 0; i < allocCount; ++i)
1188 vmaAllocs[i] = allocs[i].m_Allocation;
1189
1190 std::vector<VkBool32> allocChanged(allocCount);
1191
1192 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1193 defragmentationInfo, defragmentationStats) );
1194
1195 for(size_t i = 0; i < allocCount; ++i)
1196 {
1197 if(allocChanged[i])
1198 {
1199 RecreateAllocationResource(allocs[i]);
1200 }
1201 }
1202}
1203
1204static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1205{
1206 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1207 ValidateAllocationData(allocInfo);
1208 });
1209}
1210
1211void TestDefragmentationSimple()
1212{
1213 wprintf(L"Test defragmentation simple\n");
1214
1215 RandomNumberGenerator rand(667);
1216
1217 const VkDeviceSize BUF_SIZE = 0x10000;
1218 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1219
1220 const VkDeviceSize MIN_BUF_SIZE = 32;
1221 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1222 auto RandomBufSize = [&]() -> VkDeviceSize {
1223 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1224 };
1225
1226 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1227 bufCreateInfo.size = BUF_SIZE;
1228 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1229
1230 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1231 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1232
1233 uint32_t memTypeIndex = UINT32_MAX;
1234 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1235
1236 VmaPoolCreateInfo poolCreateInfo = {};
1237 poolCreateInfo.blockSize = BLOCK_SIZE;
1238 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1239
1240 VmaPool pool;
1241 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1242
1243 std::vector<AllocInfo> allocations;
1244
1245 // persistentlyMappedOption = 0 - not persistently mapped.
1246 // persistentlyMappedOption = 1 - persistently mapped.
1247 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1248 {
1249 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1250 const bool persistentlyMapped = persistentlyMappedOption != 0;
1251
1252 // # Test 1
1253 // Buffers of fixed size.
1254 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1255 // Expected result: at least 1 block freed.
1256 {
1257 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1258 {
1259 AllocInfo allocInfo;
1260 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1261 allocations.push_back(allocInfo);
1262 }
1263
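            // Erasing at index i and then incrementing i skips the element that slid into position i,
            // so this destroys every second ("odd") allocation.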
1264 for(size_t i = 1; i < allocations.size(); ++i)
1265 {
1266 DestroyAllocation(allocations[i]);
1267 allocations.erase(allocations.begin() + i);
1268 }
1269
1270 VmaDefragmentationStats defragStats;
1271 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1273 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001274
1275 ValidateAllocationsData(allocations.data(), allocations.size());
1276
1277 DestroyAllAllocations(allocations);
1278 }
1279
1280 // # Test 2
1281 // Buffers of fixed size.
1282 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
        // Expected result: Each of 4 iterations makes some progress.
1284 {
1285 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1286 {
1287 AllocInfo allocInfo;
1288 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1289 allocations.push_back(allocInfo);
1290 }
1291
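            // Same erase-and-advance idiom as in Test 1: destroys every second allocation.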
1292 for(size_t i = 1; i < allocations.size(); ++i)
1293 {
1294 DestroyAllocation(allocations[i]);
1295 allocations.erase(allocations.begin() + i);
1296 }
1297
1298 VmaDefragmentationInfo defragInfo = {};
1299 defragInfo.maxAllocationsToMove = 1;
1300 defragInfo.maxBytesToMove = BUF_SIZE;
1301
1302 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1303 {
1304 VmaDefragmentationStats defragStats;
1305 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001306 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001307 }
1308
1309 ValidateAllocationsData(allocations.data(), allocations.size());
1310
1311 DestroyAllAllocations(allocations);
1312 }
1313
1314 // # Test 3
1315 // Buffers of variable size.
1316 // Create a number of buffers. Remove some percent of them.
1317 // Defragment while having some percent of them unmovable.
1318 // Expected result: Just simple validation.
1319 {
1320 for(size_t i = 0; i < 100; ++i)
1321 {
1322 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1323 localBufCreateInfo.size = RandomBufSize();
1324
1325 AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
1327 allocations.push_back(allocInfo);
1328 }
1329
1330 const uint32_t percentToDelete = 60;
1331 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1332 for(size_t i = 0; i < numberToDelete; ++i)
1333 {
1334 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1335 DestroyAllocation(allocations[indexToDelete]);
1336 allocations.erase(allocations.begin() + indexToDelete);
1337 }
1338
1339 // Non-movable allocations will be at the beginning of allocations array.
1340 const uint32_t percentNonMovable = 20;
1341 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1342 for(size_t i = 0; i < numberNonMovable; ++i)
1343 {
1344 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1345 if(indexNonMovable != i)
1346 std::swap(allocations[i], allocations[indexNonMovable]);
1347 }
1348
1349 VmaDefragmentationStats defragStats;
1350 Defragment(
1351 allocations.data() + numberNonMovable,
1352 allocations.size() - numberNonMovable,
1353 nullptr, &defragStats);
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359 }
1360
1361 vmaDestroyPool(g_hAllocator, pool);
1362}
1363
1364void TestDefragmentationFull()
1365{
1366 std::vector<AllocInfo> allocations;
1367
1368 // Create initial allocations.
1369 for(size_t i = 0; i < 400; ++i)
1370 {
1371 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001372 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001373 allocations.push_back(allocation);
1374 }
1375
1376 // Delete random allocations
1377 const size_t allocationsToDeletePercent = 80;
1378 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1379 for(size_t i = 0; i < allocationsToDelete; ++i)
1380 {
1381 size_t index = (size_t)rand() % allocations.size();
1382 DestroyAllocation(allocations[index]);
1383 allocations.erase(allocations.begin() + index);
1384 }
1385
1386 for(size_t i = 0; i < allocations.size(); ++i)
1387 ValidateAllocationData(allocations[i]);
1388
Adam Sawicki0667e332018-08-24 17:26:44 +02001389 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001390
1391 {
1392 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1393 for(size_t i = 0; i < allocations.size(); ++i)
1394 vmaAllocations[i] = allocations[i].m_Allocation;
1395
1396 const size_t nonMovablePercent = 0;
1397 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1398 for(size_t i = 0; i < nonMovableCount; ++i)
1399 {
1400 size_t index = (size_t)rand() % vmaAllocations.size();
1401 vmaAllocations.erase(vmaAllocations.begin() + index);
1402 }
1403
1404 const uint32_t defragCount = 1;
1405 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1406 {
1407 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1408
1409 VmaDefragmentationInfo defragmentationInfo;
1410 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1411 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1412
1413 wprintf(L"Defragmentation #%u\n", defragIndex);
1414
1415 time_point begTime = std::chrono::high_resolution_clock::now();
1416
1417 VmaDefragmentationStats stats;
1418 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001419 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001420
1421 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1422
1423 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1424 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1425 wprintf(L"Time: %.2f s\n", defragmentDuration);
1426
1427 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1428 {
1429 if(allocationsChanged[i])
1430 {
1431 RecreateAllocationResource(allocations[i]);
1432 }
1433 }
1434
1435 for(size_t i = 0; i < allocations.size(); ++i)
1436 ValidateAllocationData(allocations[i]);
1437
Adam Sawicki0667e332018-08-24 17:26:44 +02001438 //wchar_t fileName[MAX_PATH];
1439 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1440 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001441 }
1442 }
1443
1444 // Destroy all remaining allocations.
1445 DestroyAllAllocations(allocations);
1446}
1447
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001448static void TestDefragmentationGpu()
1449{
1450 wprintf(L"Test defragmentation GPU\n");
1451
1452 std::vector<AllocInfo> allocations;
1453
    // Create enough allocations to reliably fill 3 new 256 MB blocks.
1455 const VkDeviceSize bufSize = 10ull * 1024 * 1024;
1456 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
1457 const size_t bufCount = (size_t)(totalSize / bufSize);
1458 const size_t percentToLeave = 20;
1459 RandomNumberGenerator rand = { 234522 };
1460
1461 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1462 bufCreateInfo.size = bufSize;
1463 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1464
1465 VmaAllocationCreateInfo allocCreateInfo = {};
1466 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1467 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1468 allocCreateInfo.pUserData = "TestDefragmentationGpu";
1469
1470 // Create all intended buffers.
1471 for(size_t i = 0; i < bufCount; ++i)
1472 {
1473 AllocInfo alloc;
1474 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1475 alloc.m_StartValue = rand.Generate();
1476 allocations.push_back(alloc);
1477 }
1478
1479 // Destroy some percentage of them.
1480 {
1481 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1482 for(size_t i = 0; i < buffersToDestroy; ++i)
1483 {
1484 const size_t index = rand.Generate() % allocations.size();
1485 allocations[index].Destroy();
1486 allocations.erase(allocations.begin() + index);
1487 }
1488 }
1489
1490 // Fill them with meaningful data.
1491 UploadGpuData(allocations.data(), allocations.size());
1492
1493 SaveAllocatorStatsToFile(L"GPU_defragmentation_A_before.json");
1494
1495 // Defragment using GPU only.
1496 {
1497 const size_t allocCount = allocations.size();
1498 std::vector<VmaAllocation> allocationPtrs(allocCount);
1499 std::vector<VkBool32> allocationChanged(allocCount);
1500 for(size_t i = 0; i < allocCount; ++i)
1501 {
1502 allocationPtrs[i] = allocations[i].m_Allocation;
1503 allocationChanged[i] = VK_FALSE;
1504 }
1505
1506 BeginSingleTimeCommands();
1507
1508 VmaDefragmentationInfo2 defragInfo = {};
1509 defragInfo.allocationCount = (uint32_t)allocCount;
1510 defragInfo.pAllocations = allocationPtrs.data();
1511 defragInfo.maxGpuBytesToMove = 0;//VK_WHOLE_SIZE;
1512 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1513 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1514
1515 VmaDefragmentationStats stats = {};
1516 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1517 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1518 TEST(res >= VK_SUCCESS);
1519
1520 EndSingleTimeCommands();
1521
1522 vmaDefragmentationEnd(g_hAllocator, ctx);
1523
1524 for(size_t i = 0; i < allocCount; ++i)
1525 {
1526 if(allocationChanged[i])
1527 {
1528 RecreateAllocationResource(allocations[i]);
1529 }
1530 }
1531
1532 //TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1533 //TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
1534 //TEST(stats.allocationsLost == 0);
1535 }
1536
1537 ValidateGpuData(allocations.data(), allocations.size());
1538
1539 SaveAllocatorStatsToFile(L"GPU_defragmentation_B_after.json");
1540
1541 // Destroy all remaining buffers.
1542 for(size_t i = allocations.size(); i--; )
1543 {
1544 allocations[i].Destroy();
1545 }
1546}
1547
Adam Sawickib8333fb2018-03-13 16:15:53 +01001548static void TestUserData()
1549{
1550 VkResult res;
1551
1552 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1553 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1554 bufCreateInfo.size = 0x10000;
1555
1556 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1557 {
1558 // Opaque pointer
1559 {
1560
1561 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1562 void* pointerToSomething = &res;
1563
1564 VmaAllocationCreateInfo allocCreateInfo = {};
1565 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1566 allocCreateInfo.pUserData = numberAsPointer;
1567 if(testIndex == 1)
1568 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1569
1570 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1571 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
            TEST(res == VK_SUCCESS);
            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001574
1575 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001576 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001577
1578 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1579 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001580 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001581
1582 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1583 }
1584
1585 // String
1586 {
1587 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1588 const char* name2 = "2";
1589 const size_t name1Len = strlen(name1);
1590
1591 char* name1Buf = new char[name1Len + 1];
1592 strcpy_s(name1Buf, name1Len + 1, name1);
1593
1594 VmaAllocationCreateInfo allocCreateInfo = {};
1595 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1596 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1597 allocCreateInfo.pUserData = name1Buf;
1598 if(testIndex == 1)
1599 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1600
1601 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1602 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001603 TEST(res == VK_SUCCESS);
1604 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1605 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001606
1607 delete[] name1Buf;
1608
1609 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001610 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001611
1612 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1613 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001614 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001615
1616 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1617 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001618 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001619
1620 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1621 }
1622 }
1623}
1624
1625static void TestMemoryRequirements()
1626{
1627 VkResult res;
1628 VkBuffer buf;
1629 VmaAllocation alloc;
1630 VmaAllocationInfo allocInfo;
1631
1632 const VkPhysicalDeviceMemoryProperties* memProps;
1633 vmaGetMemoryProperties(g_hAllocator, &memProps);
1634
1635 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1636 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1637 bufInfo.size = 128;
1638
1639 VmaAllocationCreateInfo allocCreateInfo = {};
1640
1641 // No requirements.
1642 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001643 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001644 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1645
1646 // Usage.
1647 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1648 allocCreateInfo.requiredFlags = 0;
1649 allocCreateInfo.preferredFlags = 0;
1650 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1651
1652 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001653 TEST(res == VK_SUCCESS);
1654 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001655 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1656
1657 // Required flags, preferred flags.
1658 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1659 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1660 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1661 allocCreateInfo.memoryTypeBits = 0;
1662
1663 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001664 TEST(res == VK_SUCCESS);
1665 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1666 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001667 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1668
1669 // memoryTypeBits.
1670 const uint32_t memType = allocInfo.memoryType;
1671 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1672 allocCreateInfo.requiredFlags = 0;
1673 allocCreateInfo.preferredFlags = 0;
1674 allocCreateInfo.memoryTypeBits = 1u << memType;
1675
1676 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001677 TEST(res == VK_SUCCESS);
1678 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001679 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1680
1681}
1682
1683static void TestBasics()
1684{
1685 VkResult res;
1686
1687 TestMemoryRequirements();
1688
1689 // Lost allocation
1690 {
1691 VmaAllocation alloc = VK_NULL_HANDLE;
1692 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001693 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001694
1695 VmaAllocationInfo allocInfo;
1696 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001697 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1698 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001699
1700 vmaFreeMemory(g_hAllocator, alloc);
1701 }
1702
1703 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1704 {
1705 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1706 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1707 bufCreateInfo.size = 128;
1708
1709 VmaAllocationCreateInfo allocCreateInfo = {};
1710 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1711 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1712
1713 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1714 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001715 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001716
1717 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1718
1719        // Same, but with DEDICATED_MEMORY.
1720 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1721
1722 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001723 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001724
1725 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1726 }
1727
1728 TestUserData();
1729}
1730
1731void TestHeapSizeLimit()
1732{
1733 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1734 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1735
1736 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1737 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1738 {
1739 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1740 }
1741
1742 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1743 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1744 allocatorCreateInfo.device = g_hDevice;
1745 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1746
1747 VmaAllocator hAllocator;
1748 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001749 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001750
1751 struct Item
1752 {
1753 VkBuffer hBuf;
1754 VmaAllocation hAlloc;
1755 };
1756 std::vector<Item> items;
1757
1758 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1759 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1760
1761    // 1. Allocate two dedicated-memory buffers, each half the size of BLOCK_SIZE.
1762 VmaAllocationInfo ownAllocInfo;
1763 {
1764 VmaAllocationCreateInfo allocCreateInfo = {};
1765 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1766 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1767
1768 bufCreateInfo.size = BLOCK_SIZE / 2;
1769
1770 for(size_t i = 0; i < 2; ++i)
1771 {
1772 Item item;
1773 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001774 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001775 items.push_back(item);
1776 }
1777 }
1778
1779    // Create a pool to make sure further allocations come from this same memory type.
1780 VmaPoolCreateInfo poolCreateInfo = {};
1781 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
1782 poolCreateInfo.blockSize = BLOCK_SIZE;
1783
1784 VmaPool hPool;
1785 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001786 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001787
1788 // 2. Allocate normal buffers from all the remaining memory.
1789 {
1790 VmaAllocationCreateInfo allocCreateInfo = {};
1791 allocCreateInfo.pool = hPool;
1792
1793 bufCreateInfo.size = BLOCK_SIZE / 2;
1794
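        // With the heap limited to HEAP_SIZE_LIMIT and pool blocks of BLOCK_SIZE, the heap holds
        // HEAP_SIZE_LIMIT / BLOCK_SIZE = 8 blocks' worth of memory. Step 1 already consumed one
        // block's worth (two dedicated allocations of BLOCK_SIZE / 2 each), so the remaining
        // 7 blocks fit 14 buffers of half a block each - hence ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2.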
1795 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
1796 for(size_t i = 0; i < bufCount; ++i)
1797 {
1798 Item item;
1799 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001800 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001801 items.push_back(item);
1802 }
1803 }
1804
1805 // 3. Allocation of one more (even small) buffer should fail.
1806 {
1807 VmaAllocationCreateInfo allocCreateInfo = {};
1808 allocCreateInfo.pool = hPool;
1809
1810 bufCreateInfo.size = 128;
1811
1812 VkBuffer hBuf;
1813 VmaAllocation hAlloc;
1814 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001815 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001816 }
1817
1818 // Destroy everything.
1819 for(size_t i = items.size(); i--; )
1820 {
1821 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
1822 }
1823
1824 vmaDestroyPool(hAllocator, hPool);
1825
1826 vmaDestroyAllocator(hAllocator);
1827}
1828
Adam Sawicki212a4a62018-06-14 15:44:45 +02001829#if VMA_DEBUG_MARGIN
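// VMA_DEBUG_MARGIN, when defined to a non-zero value, makes the allocator reserve that many bytes
// of margin around every allocation. This test creates a few buffers, checks that their offsets
// are separated by at least the margin (also at the beginning of a block), and finally runs
// vmaCheckCorruption over all memory.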
Adam Sawicki73b16652018-06-11 16:39:25 +02001830static void TestDebugMargin()
1831{
1832 if(VMA_DEBUG_MARGIN == 0)
1833 {
1834 return;
1835 }
1836
1837 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02001838 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02001839
1840 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02001841 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02001842
1843    // Create a few buffers of different sizes.
1844 const size_t BUF_COUNT = 10;
1845 BufferInfo buffers[BUF_COUNT];
1846 VmaAllocationInfo allocInfo[BUF_COUNT];
1847    for(size_t i = 0; i < BUF_COUNT; ++i)
1848 {
1849 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02001850 // Last one will be mapped.
1851 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02001852
1853 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001854 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02001855        // Margin is also preserved at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001856 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001857
1858 if(i == BUF_COUNT - 1)
1859 {
1860 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001861 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001862 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
1863 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
1864 }
Adam Sawicki73b16652018-06-11 16:39:25 +02001865 }
1866
1867 // Check if their offsets preserve margin between them.
1868 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
1869 {
1870 if(lhs.deviceMemory != rhs.deviceMemory)
1871 {
1872 return lhs.deviceMemory < rhs.deviceMemory;
1873 }
1874 return lhs.offset < rhs.offset;
1875 });
1876 for(size_t i = 1; i < BUF_COUNT; ++i)
1877 {
1878 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
1879 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001880 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02001881 }
1882 }
1883
Adam Sawicki212a4a62018-06-14 15:44:45 +02001884 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001885 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001886
Adam Sawicki73b16652018-06-11 16:39:25 +02001887 // Destroy all buffers.
1888 for(size_t i = BUF_COUNT; i--; )
1889 {
1890 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
1891 }
1892}
Adam Sawicki212a4a62018-06-14 15:44:45 +02001893#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001894
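// Exercises a pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT in the usage patterns that
// the linear algorithm supports: free-at-once, stack (LIFO), double stack (allocations growing
// from both ends via VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT), and ring buffer, including a ring
// buffer combined with lost allocations.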
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001895static void TestLinearAllocator()
1896{
1897 wprintf(L"Test linear allocator\n");
1898
1899 RandomNumberGenerator rand{645332};
1900
1901 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1902 sampleBufCreateInfo.size = 1024; // Whatever.
1903 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1904
1905 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
1906 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1907
1908 VmaPoolCreateInfo poolCreateInfo = {};
1909 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001910 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001911
Adam Sawickiee082772018-06-20 17:45:49 +02001912 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001913 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
1914 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
1915
1916 VmaPool pool = nullptr;
1917 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001918 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001919
1920 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
1921
1922 VmaAllocationCreateInfo allocCreateInfo = {};
1923 allocCreateInfo.pool = pool;
1924
1925 constexpr size_t maxBufCount = 100;
1926 std::vector<BufferInfo> bufInfo;
1927
1928 constexpr VkDeviceSize bufSizeMin = 16;
1929 constexpr VkDeviceSize bufSizeMax = 1024;
1930 VmaAllocationInfo allocInfo;
1931 VkDeviceSize prevOffset = 0;
1932
1933 // Test one-time free.
1934 for(size_t i = 0; i < 2; ++i)
1935 {
1936 // Allocate number of buffers of varying size that surely fit into this block.
1937 VkDeviceSize bufSumSize = 0;
1938 for(size_t i = 0; i < maxBufCount; ++i)
1939 {
1940 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1941 BufferInfo newBufInfo;
1942 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1943 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001944 TEST(res == VK_SUCCESS);
1945 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001946 bufInfo.push_back(newBufInfo);
1947 prevOffset = allocInfo.offset;
1948 bufSumSize += bufCreateInfo.size;
1949 }
1950
1951 // Validate pool stats.
1952 VmaPoolStats stats;
1953 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001954 TEST(stats.size == poolCreateInfo.blockSize);
1955        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
1956 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001957
1958 // Destroy the buffers in random order.
1959 while(!bufInfo.empty())
1960 {
1961 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
1962 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
1963 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1964 bufInfo.erase(bufInfo.begin() + indexToDestroy);
1965 }
1966 }
1967
1968 // Test stack.
1969 {
1970 // Allocate number of buffers of varying size that surely fit into this block.
1971 for(size_t i = 0; i < maxBufCount; ++i)
1972 {
1973 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1974 BufferInfo newBufInfo;
1975 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1976 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001977 TEST(res == VK_SUCCESS);
1978 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001979 bufInfo.push_back(newBufInfo);
1980 prevOffset = allocInfo.offset;
1981 }
1982
1983 // Destroy few buffers from top of the stack.
1984 for(size_t i = 0; i < maxBufCount / 5; ++i)
1985 {
1986 const BufferInfo& currBufInfo = bufInfo.back();
1987 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1988 bufInfo.pop_back();
1989 }
1990
1991 // Create some more
1992 for(size_t i = 0; i < maxBufCount / 5; ++i)
1993 {
1994 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1995 BufferInfo newBufInfo;
1996 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1997 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001998 TEST(res == VK_SUCCESS);
1999 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002000 bufInfo.push_back(newBufInfo);
2001 prevOffset = allocInfo.offset;
2002 }
2003
2004 // Destroy the buffers in reverse order.
2005 while(!bufInfo.empty())
2006 {
2007 const BufferInfo& currBufInfo = bufInfo.back();
2008 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2009 bufInfo.pop_back();
2010 }
2011 }
2012
Adam Sawickiee082772018-06-20 17:45:49 +02002013 // Test ring buffer.
2014 {
2015 // Allocate number of buffers that surely fit into this block.
2016 bufCreateInfo.size = bufSizeMax;
2017 for(size_t i = 0; i < maxBufCount; ++i)
2018 {
2019 BufferInfo newBufInfo;
2020 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2021 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002022 TEST(res == VK_SUCCESS);
2023 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002024 bufInfo.push_back(newBufInfo);
2025 prevOffset = allocInfo.offset;
2026 }
2027
2028        // Free and allocate new buffers enough times to make sure we wrap around at least once.
2029 const size_t buffersPerIter = maxBufCount / 10 - 1;
2030 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2031 for(size_t iter = 0; iter < iterCount; ++iter)
2032 {
2033 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2034 {
2035 const BufferInfo& currBufInfo = bufInfo.front();
2036 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2037 bufInfo.erase(bufInfo.begin());
2038 }
2039 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2040 {
2041 BufferInfo newBufInfo;
2042 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2043 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002044 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002045 bufInfo.push_back(newBufInfo);
2046 }
2047 }
2048
2049 // Allocate buffers until we reach out-of-memory.
2050 uint32_t debugIndex = 0;
2051 while(res == VK_SUCCESS)
2052 {
2053 BufferInfo newBufInfo;
2054 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2055 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2056 if(res == VK_SUCCESS)
2057 {
2058 bufInfo.push_back(newBufInfo);
2059 }
2060 else
2061 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002062 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002063 }
2064 ++debugIndex;
2065 }
2066
2067 // Destroy the buffers in random order.
2068 while(!bufInfo.empty())
2069 {
2070 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2071 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2072 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2073 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2074 }
2075 }
2076
Adam Sawicki680b2252018-08-22 14:47:32 +02002077 // Test double stack.
2078 {
2079        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2080 VkDeviceSize prevOffsetLower = 0;
2081 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2082 for(size_t i = 0; i < maxBufCount; ++i)
2083 {
2084 const bool upperAddress = (i % 2) != 0;
2085 if(upperAddress)
2086 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2087 else
2088 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2089 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2090 BufferInfo newBufInfo;
2091 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2092 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002093 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002094 if(upperAddress)
2095 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002096 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002097 prevOffsetUpper = allocInfo.offset;
2098 }
2099 else
2100 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002101 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002102 prevOffsetLower = allocInfo.offset;
2103 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002104 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002105 bufInfo.push_back(newBufInfo);
2106 }
2107
2108 // Destroy few buffers from top of the stack.
2109 for(size_t i = 0; i < maxBufCount / 5; ++i)
2110 {
2111 const BufferInfo& currBufInfo = bufInfo.back();
2112 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2113 bufInfo.pop_back();
2114 }
2115
2116 // Create some more
2117 for(size_t i = 0; i < maxBufCount / 5; ++i)
2118 {
2119 const bool upperAddress = (i % 2) != 0;
2120 if(upperAddress)
2121 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2122 else
2123 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2124 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2125 BufferInfo newBufInfo;
2126 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2127 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002128 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002129 bufInfo.push_back(newBufInfo);
2130 }
2131
2132 // Destroy the buffers in reverse order.
2133 while(!bufInfo.empty())
2134 {
2135 const BufferInfo& currBufInfo = bufInfo.back();
2136 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2137 bufInfo.pop_back();
2138 }
2139
2140 // Create buffers on both sides until we reach out of memory.
2141 prevOffsetLower = 0;
2142 prevOffsetUpper = poolCreateInfo.blockSize;
2143 res = VK_SUCCESS;
2144 for(size_t i = 0; res == VK_SUCCESS; ++i)
2145 {
2146 const bool upperAddress = (i % 2) != 0;
2147 if(upperAddress)
2148 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2149 else
2150 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2151 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2152 BufferInfo newBufInfo;
2153 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2154 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2155 if(res == VK_SUCCESS)
2156 {
2157 if(upperAddress)
2158 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002159 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002160 prevOffsetUpper = allocInfo.offset;
2161 }
2162 else
2163 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002164 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002165 prevOffsetLower = allocInfo.offset;
2166 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002167 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002168 bufInfo.push_back(newBufInfo);
2169 }
2170 }
2171
2172 // Destroy the buffers in random order.
2173 while(!bufInfo.empty())
2174 {
2175 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2176 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2177 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2178 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2179 }
2180
2181 // Create buffers on upper side only, constant size, until we reach out of memory.
2182 prevOffsetUpper = poolCreateInfo.blockSize;
2183 res = VK_SUCCESS;
2184 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2185 bufCreateInfo.size = bufSizeMax;
2186 for(size_t i = 0; res == VK_SUCCESS; ++i)
2187 {
2188 BufferInfo newBufInfo;
2189 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2190 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2191 if(res == VK_SUCCESS)
2192 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002193 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002194 prevOffsetUpper = allocInfo.offset;
2195 bufInfo.push_back(newBufInfo);
2196 }
2197 }
2198
2199 // Destroy the buffers in reverse order.
2200 while(!bufInfo.empty())
2201 {
2202 const BufferInfo& currBufInfo = bufInfo.back();
2203 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2204 bufInfo.pop_back();
2205 }
2206 }
2207
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002208 // Test ring buffer with lost allocations.
2209 {
2210 // Allocate number of buffers until pool is full.
2211 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
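        // An allocation created with CAN_BECOME_LOST can be taken over by a later allocation made
        // with CAN_MAKE_OTHER_LOST once enough frames have passed (per the pool's frameInUseCount);
        // a lost allocation then reports deviceMemory == VK_NULL_HANDLE from vmaGetAllocationInfo.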
2212 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2213 res = VK_SUCCESS;
2214 for(size_t i = 0; res == VK_SUCCESS; ++i)
2215 {
2216 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2217
2218 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2219
2220 BufferInfo newBufInfo;
2221 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2222 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2223 if(res == VK_SUCCESS)
2224 bufInfo.push_back(newBufInfo);
2225 }
2226
2227 // Free first half of it.
2228 {
2229 const size_t buffersToDelete = bufInfo.size() / 2;
2230 for(size_t i = 0; i < buffersToDelete; ++i)
2231 {
2232 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2233 }
2234 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2235 }
2236
2237        // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002238        // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002239 res = VK_SUCCESS;
2240 for(size_t i = 0; res == VK_SUCCESS; ++i)
2241 {
2242 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2243
2244 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2245
2246 BufferInfo newBufInfo;
2247 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2248 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2249 if(res == VK_SUCCESS)
2250 bufInfo.push_back(newBufInfo);
2251 }
2252
2253 VkDeviceSize firstNewOffset;
2254 {
2255 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2256
2257 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2258 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2259 bufCreateInfo.size = bufSizeMax;
2260
2261 BufferInfo newBufInfo;
2262 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2263 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002264 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002265 bufInfo.push_back(newBufInfo);
2266 firstNewOffset = allocInfo.offset;
2267
2268 // Make sure at least one buffer from the beginning became lost.
2269 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002270 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002271 }
2272
2273        // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with them.
2274 size_t newCount = 1;
2275 for(;;)
2276 {
2277 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2278
2279 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2280
2281 BufferInfo newBufInfo;
2282 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2283 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002284 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002285 bufInfo.push_back(newBufInfo);
2286 ++newCount;
2287 if(allocInfo.offset < firstNewOffset)
2288 break;
2289 }
2290
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002291 // Delete buffers that are lost.
2292 for(size_t i = bufInfo.size(); i--; )
2293 {
2294 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2295 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2296 {
2297 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2298 bufInfo.erase(bufInfo.begin() + i);
2299 }
2300 }
2301
2302 // Test vmaMakePoolAllocationsLost
2303 {
2304 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2305
2306 size_t lostAllocCount = SIZE_MAX;
2307 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002308 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002309
2310 size_t realLostAllocCount = 0;
2311 for(size_t i = 0; i < bufInfo.size(); ++i)
2312 {
2313 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2314 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2315 ++realLostAllocCount;
2316 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002317 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002318 }
2319
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002320 // Destroy all the buffers in forward order.
2321 for(size_t i = 0; i < bufInfo.size(); ++i)
2322 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2323 bufInfo.clear();
2324 }
2325
Adam Sawicki70a683e2018-08-24 15:36:32 +02002326 vmaDestroyPool(g_hAllocator, pool);
2327}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002328
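// Same linear algorithm, but without fixing the block size or block count, so the pool is allowed
// to grow to a second block and shrink back. Verifies the blockCount reported by vmaGetPoolStats.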
Adam Sawicki70a683e2018-08-24 15:36:32 +02002329static void TestLinearAllocatorMultiBlock()
2330{
2331 wprintf(L"Test linear allocator multi block\n");
2332
2333 RandomNumberGenerator rand{345673};
2334
2335 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2336 sampleBufCreateInfo.size = 1024 * 1024;
2337 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2338
2339 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2340 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2341
2342 VmaPoolCreateInfo poolCreateInfo = {};
2343 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2344 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002345 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002346
2347 VmaPool pool = nullptr;
2348 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002349 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002350
2351 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2352
2353 VmaAllocationCreateInfo allocCreateInfo = {};
2354 allocCreateInfo.pool = pool;
2355
2356 std::vector<BufferInfo> bufInfo;
2357 VmaAllocationInfo allocInfo;
2358
2359 // Test one-time free.
2360 {
2361 // Allocate buffers until we move to a second block.
2362 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2363 for(uint32_t i = 0; ; ++i)
2364 {
2365 BufferInfo newBufInfo;
2366 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2367 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002368 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002369 bufInfo.push_back(newBufInfo);
2370 if(lastMem && allocInfo.deviceMemory != lastMem)
2371 {
2372 break;
2373 }
2374 lastMem = allocInfo.deviceMemory;
2375 }
2376
Adam Sawickib8d34d52018-10-03 17:41:20 +02002377 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002378
2379 // Make sure that pool has now two blocks.
2380 VmaPoolStats poolStats = {};
2381 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002382 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002383
2384 // Destroy all the buffers in random order.
2385 while(!bufInfo.empty())
2386 {
2387 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2388 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2389 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2390 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2391 }
2392
2393 // Make sure that pool has now at most one block.
2394 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002395 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002396 }
2397
2398 // Test stack.
2399 {
2400 // Allocate buffers until we move to a second block.
2401 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2402 for(uint32_t i = 0; ; ++i)
2403 {
2404 BufferInfo newBufInfo;
2405 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2406 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002407 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002408 bufInfo.push_back(newBufInfo);
2409 if(lastMem && allocInfo.deviceMemory != lastMem)
2410 {
2411 break;
2412 }
2413 lastMem = allocInfo.deviceMemory;
2414 }
2415
Adam Sawickib8d34d52018-10-03 17:41:20 +02002416 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002417
2418 // Add few more buffers.
2419 for(uint32_t i = 0; i < 5; ++i)
2420 {
2421 BufferInfo newBufInfo;
2422 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2423 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002424 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002425 bufInfo.push_back(newBufInfo);
2426 }
2427
2428 // Make sure that pool has now two blocks.
2429 VmaPoolStats poolStats = {};
2430 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002431 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002432
2433 // Delete half of buffers, LIFO.
2434 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2435 {
2436 const BufferInfo& currBufInfo = bufInfo.back();
2437 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2438 bufInfo.pop_back();
2439 }
2440
2441 // Add one more buffer.
2442 BufferInfo newBufInfo;
2443 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2444 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002445 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002446 bufInfo.push_back(newBufInfo);
2447
2448 // Make sure that pool has now one block.
2449 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002450 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002451
2452 // Delete all the remaining buffers, LIFO.
2453 while(!bufInfo.empty())
2454 {
2455 const BufferInfo& currBufInfo = bufInfo.back();
2456 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2457 bufInfo.pop_back();
2458 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002459 }
2460
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002461 vmaDestroyPool(g_hAllocator, pool);
2462}
2463
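// Intended to be run under a debugger: it builds a small, known double-stack layout in a linear
// pool and stops at the breakpoint below so currStats, poolStats, and statsStr can be inspected by hand.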
Adam Sawickifd11d752018-08-22 15:02:10 +02002464static void ManuallyTestLinearAllocator()
2465{
2466 VmaStats origStats;
2467 vmaCalculateStats(g_hAllocator, &origStats);
2468
2469 wprintf(L"Manually test linear allocator\n");
2470
2471 RandomNumberGenerator rand{645332};
2472
2473 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2474 sampleBufCreateInfo.size = 1024; // Whatever.
2475 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2476
2477 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2478 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2479
2480 VmaPoolCreateInfo poolCreateInfo = {};
2481 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002482 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002483
2484 poolCreateInfo.blockSize = 10 * 1024;
2485 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2486 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2487
2488 VmaPool pool = nullptr;
2489 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002490 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002491
2492 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2493
2494 VmaAllocationCreateInfo allocCreateInfo = {};
2495 allocCreateInfo.pool = pool;
2496
2497 std::vector<BufferInfo> bufInfo;
2498 VmaAllocationInfo allocInfo;
2499 BufferInfo newBufInfo;
2500
2501 // Test double stack.
2502 {
2503 /*
2504 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2505 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2506
2507 Totally:
2508 1 block allocated
2509 10240 Vulkan bytes
2510 6 new allocations
2511 2256 bytes in allocations
2512 */
2513
2514 bufCreateInfo.size = 32;
2515 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2516 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002517 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002518 bufInfo.push_back(newBufInfo);
2519
2520 bufCreateInfo.size = 1024;
2521 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2522 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002523 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002524 bufInfo.push_back(newBufInfo);
2525
2526 bufCreateInfo.size = 32;
2527 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2528 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002529 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002530 bufInfo.push_back(newBufInfo);
2531
2532 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2533
2534 bufCreateInfo.size = 128;
2535 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2536 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002537 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002538 bufInfo.push_back(newBufInfo);
2539
2540 bufCreateInfo.size = 1024;
2541 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2542 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002543 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002544 bufInfo.push_back(newBufInfo);
2545
2546 bufCreateInfo.size = 16;
2547 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2548 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002549 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002550 bufInfo.push_back(newBufInfo);
2551
2552 VmaStats currStats;
2553 vmaCalculateStats(g_hAllocator, &currStats);
2554 VmaPoolStats poolStats;
2555 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2556
2557 char* statsStr = nullptr;
2558 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2559
2560 // PUT BREAKPOINT HERE TO CHECK.
2561 // Inspect: currStats versus origStats, poolStats, statsStr.
2562 int I = 0;
2563
2564 vmaFreeStatsString(g_hAllocator, statsStr);
2565
2566 // Destroy the buffers in reverse order.
2567 while(!bufInfo.empty())
2568 {
2569 const BufferInfo& currBufInfo = bufInfo.back();
2570 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2571 bufInfo.pop_back();
2572 }
2573 }
2574
2575 vmaDestroyPool(g_hAllocator, pool);
2576}
2577
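// Benchmarks one combination of (algorithm, empty/non-empty pool, allocation strategy, free order).
// Unless the pool is to stay empty, it is pre-filled to about 1/3 of its size and half of those
// allocations are freed at random to create fragmentation. Then, for iterationCount iterations,
// allocCount allocations of random size are made and freed in the requested order; total allocation
// and deallocation times are printed and optionally appended to the CSV file.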
Adam Sawicki80927152018-09-07 17:27:23 +02002578static void BenchmarkAlgorithmsCase(FILE* file,
2579 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002580 bool empty,
2581 VmaAllocationCreateFlags allocStrategy,
2582 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002583{
2584 RandomNumberGenerator rand{16223};
2585
2586 const VkDeviceSize bufSizeMin = 32;
2587 const VkDeviceSize bufSizeMax = 1024;
2588 const size_t maxBufCapacity = 10000;
2589 const uint32_t iterationCount = 10;
2590
2591 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2592 sampleBufCreateInfo.size = bufSizeMax;
2593 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2594
2595 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2596 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2597
2598 VmaPoolCreateInfo poolCreateInfo = {};
2599 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002600 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002601
2602 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002603 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002604 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2605
2606 VmaPool pool = nullptr;
2607 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002608 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002609
2610 // Buffer created just to get memory requirements. Never bound to any memory.
2611 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2612 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002613 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002614
2615 VkMemoryRequirements memReq = {};
2616 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2617
2618 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2619
2620 VmaAllocationCreateInfo allocCreateInfo = {};
2621 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002622 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002623
2624 VmaAllocation alloc;
2625 std::vector<VmaAllocation> baseAllocations;
2626
2627 if(!empty)
2628 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002629 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002630 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002631 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002632 {
2633 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2634 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002635 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002636 baseAllocations.push_back(alloc);
2637 totalSize += memReq.size;
2638 }
2639
2640 // Delete half of them, choose randomly.
2641 size_t allocsToDelete = baseAllocations.size() / 2;
2642 for(size_t i = 0; i < allocsToDelete; ++i)
2643 {
2644 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2645 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2646 baseAllocations.erase(baseAllocations.begin() + index);
2647 }
2648 }
2649
2650 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002651 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002652 std::vector<VmaAllocation> testAllocations;
2653 testAllocations.reserve(allocCount);
2654 duration allocTotalDuration = duration::zero();
2655 duration freeTotalDuration = duration::zero();
2656 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2657 {
2658 // Allocations
2659 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2660 for(size_t i = 0; i < allocCount; ++i)
2661 {
2662 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2663 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002664 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002665 testAllocations.push_back(alloc);
2666 }
2667 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2668
2669 // Deallocations
2670 switch(freeOrder)
2671 {
2672 case FREE_ORDER::FORWARD:
2673 // Leave testAllocations unchanged.
2674 break;
2675 case FREE_ORDER::BACKWARD:
2676 std::reverse(testAllocations.begin(), testAllocations.end());
2677 break;
2678 case FREE_ORDER::RANDOM:
2679 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2680 break;
2681 default: assert(0);
2682 }
2683
2684 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2685 for(size_t i = 0; i < allocCount; ++i)
2686 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2687 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2688
2689 testAllocations.clear();
2690 }
2691
2692 // Delete baseAllocations
2693 while(!baseAllocations.empty())
2694 {
2695 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2696 baseAllocations.pop_back();
2697 }
2698
2699 vmaDestroyPool(g_hAllocator, pool);
2700
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002701 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2702 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2703
Adam Sawicki80927152018-09-07 17:27:23 +02002704 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2705 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002706 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002707 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002708 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002709 allocTotalSeconds,
2710 freeTotalSeconds);
2711
2712 if(file)
2713 {
2714 std::string currTime;
2715 CurrentTimeToStr(currTime);
2716
Adam Sawicki80927152018-09-07 17:27:23 +02002717 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002718 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002719 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002720 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002721 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002722 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2723 allocTotalSeconds,
2724 freeTotalSeconds);
2725 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002726}
2727
Adam Sawicki80927152018-09-07 17:27:23 +02002728static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002729{
Adam Sawicki80927152018-09-07 17:27:23 +02002730 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002731
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002732 if(file)
2733 {
2734 fprintf(file,
2735 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002736 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002737 "Allocation time (s),Deallocation time (s)\n");
2738 }
2739
Adam Sawicki0a607132018-08-24 11:18:41 +02002740 uint32_t freeOrderCount = 1;
2741 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2742 freeOrderCount = 3;
2743 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2744 freeOrderCount = 2;
2745
2746 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002747 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002748
2749 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2750 {
2751 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2752 switch(freeOrderIndex)
2753 {
2754 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2755 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2756 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2757 default: assert(0);
2758 }
2759
2760 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2761 {
Adam Sawicki80927152018-09-07 17:27:23 +02002762 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002763 {
Adam Sawicki80927152018-09-07 17:27:23 +02002764 uint32_t algorithm = 0;
2765 switch(algorithmIndex)
2766 {
2767 case 0:
2768 break;
2769 case 1:
2770 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2771 break;
2772 case 2:
2773 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2774 break;
2775 default:
2776 assert(0);
2777 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002778
Adam Sawicki80927152018-09-07 17:27:23 +02002779 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002780 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2781 {
2782                    VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002783 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002784 {
2785 switch(allocStrategyIndex)
2786 {
2787 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2788 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2789 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2790 default: assert(0);
2791 }
2792 }
2793
Adam Sawicki80927152018-09-07 17:27:23 +02002794 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002795 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002796 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002797 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002798 strategy,
2799 freeOrder); // freeOrder
2800 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002801 }
2802 }
2803 }
2804}
2805
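// Uses a custom pool sized to hold exactly BUF_COUNT buffers of identical size (4 blocks of
// BUF_COUNT / 4 buffers each) together with the CAN_BECOME_LOST / CAN_MAKE_OTHER_LOST flags to
// exercise: filling the pool, lost allocations across frame indices, defragmentation,
// vmaMakePoolAllocationsLost, and an allocation larger than the block size.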
Adam Sawickib8333fb2018-03-13 16:15:53 +01002806static void TestPool_SameSize()
2807{
2808 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2809 const size_t BUF_COUNT = 100;
2810 VkResult res;
2811
2812 RandomNumberGenerator rand{123};
2813
2814 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2815 bufferInfo.size = BUF_SIZE;
2816 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2817
2818 uint32_t memoryTypeBits = UINT32_MAX;
2819 {
2820 VkBuffer dummyBuffer;
2821 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002822 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002823
2824 VkMemoryRequirements memReq;
2825 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2826 memoryTypeBits = memReq.memoryTypeBits;
2827
2828 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2829 }
2830
2831 VmaAllocationCreateInfo poolAllocInfo = {};
2832 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2833 uint32_t memTypeIndex;
2834 res = vmaFindMemoryTypeIndex(
2835 g_hAllocator,
2836 memoryTypeBits,
2837 &poolAllocInfo,
2838 &memTypeIndex);
2839
2840 VmaPoolCreateInfo poolCreateInfo = {};
2841 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2842 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2843 poolCreateInfo.minBlockCount = 1;
2844 poolCreateInfo.maxBlockCount = 4;
2845 poolCreateInfo.frameInUseCount = 0;
2846
2847 VmaPool pool;
2848 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002849 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002850
2851 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2852
2853 VmaAllocationCreateInfo allocInfo = {};
2854 allocInfo.pool = pool;
2855 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2856 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2857
2858 struct BufItem
2859 {
2860 VkBuffer Buf;
2861 VmaAllocation Alloc;
2862 };
2863 std::vector<BufItem> items;
2864
2865 // Fill entire pool.
2866 for(size_t i = 0; i < BUF_COUNT; ++i)
2867 {
2868 BufItem item;
2869 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002870 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002871 items.push_back(item);
2872 }
2873
2874 // Make sure that another allocation would fail.
2875 {
2876 BufItem item;
2877 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002878 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002879 }
2880
2881 // Validate that no buffer is lost. Also check that they are not mapped.
2882 for(size_t i = 0; i < items.size(); ++i)
2883 {
2884 VmaAllocationInfo allocInfo;
2885 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002886 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2887 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002888 }
2889
2890 // Free some percent of random items.
2891 {
2892 const size_t PERCENT_TO_FREE = 10;
2893 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2894 for(size_t i = 0; i < itemsToFree; ++i)
2895 {
2896 size_t index = (size_t)rand.Generate() % items.size();
2897 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2898 items.erase(items.begin() + index);
2899 }
2900 }
2901
2902 // Randomly allocate and free items.
2903 {
2904 const size_t OPERATION_COUNT = BUF_COUNT;
2905 for(size_t i = 0; i < OPERATION_COUNT; ++i)
2906 {
2907 bool allocate = rand.Generate() % 2 != 0;
2908 if(allocate)
2909 {
2910 if(items.size() < BUF_COUNT)
2911 {
2912 BufItem item;
2913 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002914 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002915 items.push_back(item);
2916 }
2917 }
2918 else // Free
2919 {
2920 if(!items.empty())
2921 {
2922 size_t index = (size_t)rand.Generate() % items.size();
2923 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2924 items.erase(items.begin() + index);
2925 }
2926 }
2927 }
2928 }
2929
2930 // Allocate up to maximum.
2931 while(items.size() < BUF_COUNT)
2932 {
2933 BufItem item;
2934 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002935 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002936 items.push_back(item);
2937 }
2938
2939 // Validate that no buffer is lost.
2940 for(size_t i = 0; i < items.size(); ++i)
2941 {
2942 VmaAllocationInfo allocInfo;
2943 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002944 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002945 }
2946
2947 // Next frame.
2948 vmaSetCurrentFrameIndex(g_hAllocator, 2);
2949
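    // Because the pool is completely full and frameInUseCount == 0, allocating another BUF_COUNT
    // buffers with CAN_MAKE_OTHER_LOST on the next frame is expected to reclaim (lose) the buffers
    // from the previous frame.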
2950 // Allocate another BUF_COUNT buffers.
2951 for(size_t i = 0; i < BUF_COUNT; ++i)
2952 {
2953 BufItem item;
2954 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002955 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002956 items.push_back(item);
2957 }
2958
2959    // Make sure the first BUF_COUNT buffers are lost, and delete them.
2960 for(size_t i = 0; i < BUF_COUNT; ++i)
2961 {
2962 VmaAllocationInfo allocInfo;
2963 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002964 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002965 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2966 }
2967 items.erase(items.begin(), items.begin() + BUF_COUNT);
2968
2969 // Validate that no buffer is lost.
2970 for(size_t i = 0; i < items.size(); ++i)
2971 {
2972 VmaAllocationInfo allocInfo;
2973 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002974 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002975 }
2976
2977 // Free one item.
2978 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
2979 items.pop_back();
2980
2981 // Validate statistics.
2982 {
2983 VmaPoolStats poolStats = {};
2984 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002985 TEST(poolStats.allocationCount == items.size());
2986        TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
2987 TEST(poolStats.unusedRangeCount == 1);
2988 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
2989 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002990 }
2991
2992 // Free all remaining items.
2993 for(size_t i = items.size(); i--; )
2994 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2995 items.clear();
2996
2997 // Allocate maximum items again.
2998 for(size_t i = 0; i < BUF_COUNT; ++i)
2999 {
3000 BufItem item;
3001 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003002 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003003 items.push_back(item);
3004 }
3005
3006 // Delete every other item.
3007 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3008 {
3009 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3010 items.erase(items.begin() + i);
3011 }
3012
3013 // Defragment!
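    // Half of the buffers were just destroyed, leaving gaps in the pool's blocks.
    // vmaDefragment should compact the remaining allocations into fewer blocks; the test
    // expects exactly 2 VkDeviceMemory blocks to become empty and be freed.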
3014 {
3015 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3016 for(size_t i = 0; i < items.size(); ++i)
3017 allocationsToDefragment[i] = items[i].Alloc;
3018
3019 VmaDefragmentationStats defragmentationStats;
3020 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003021 TEST(res == VK_SUCCESS);
3022 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003023 }
3024
3025 // Free all remaining items.
3026 for(size_t i = items.size(); i--; )
3027 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3028 items.clear();
3029
3030 ////////////////////////////////////////////////////////////////////////////////
3031 // Test for vmaMakePoolAllocationsLost
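    // vmaMakePoolAllocationsLost marks as lost every allocation in the pool that was not
    // used in the current frame (or within the pool's frameInUseCount window) and returns
    // the number of allocations affected.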
3032
3033 // Allocate 4 buffers on frame 10.
3034 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3035 for(size_t i = 0; i < 4; ++i)
3036 {
3037 BufItem item;
3038 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003039 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003040 items.push_back(item);
3041 }
3042
3043 // Touch first 2 of them on frame 11.
3044 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3045 for(size_t i = 0; i < 2; ++i)
3046 {
3047 VmaAllocationInfo allocInfo;
3048 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3049 }
3050
3051    // Call vmaMakePoolAllocationsLost. Only the remaining (untouched) 2 should become lost.
3052 size_t lostCount = 0xDEADC0DE;
3053 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003054 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003055
3056 // Make another call. Now 0 should be lost.
3057 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003058 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003059
3060 // Make another call, with null count. Should not crash.
3061 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3062
3063 // END: Free all remaining items.
3064 for(size_t i = items.size(); i--; )
3065 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3066
3067 items.clear();
3068
Adam Sawickid2924172018-06-11 12:48:46 +02003069 ////////////////////////////////////////////////////////////////////////////////
3070 // Test for allocation too large for pool
3071
3072 {
3073 VmaAllocationCreateInfo allocCreateInfo = {};
3074 allocCreateInfo.pool = pool;
3075
3076 VkMemoryRequirements memReq;
3077 memReq.memoryTypeBits = UINT32_MAX;
3078 memReq.alignment = 1;
3079 memReq.size = poolCreateInfo.blockSize + 4;
3080
3081 VmaAllocation alloc = nullptr;
3082 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003083 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003084 }
3085
Adam Sawickib8333fb2018-03-13 16:15:53 +01003086 vmaDestroyPool(g_hAllocator, pool);
3087}
3088
Adam Sawickie44c6262018-06-15 14:30:39 +02003089static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3090{
3091 const uint8_t* pBytes = (const uint8_t*)pMemory;
3092 for(size_t i = 0; i < size; ++i)
3093 {
3094 if(pBytes[i] != pattern)
3095 {
3096 return false;
3097 }
3098 }
3099 return true;
3100}
3101
3102static void TestAllocationsInitialization()
3103{
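    // This test relies on the debug fill patterns: with VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // enabled (assumed for this build), allocations are filled with 0xDC on creation and
    // 0xEF on destruction, which ValidatePattern checks below.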
3104 VkResult res;
3105
3106 const size_t BUF_SIZE = 1024;
3107
3108 // Create pool.
3109
3110 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3111 bufInfo.size = BUF_SIZE;
3112 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3113
3114 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3115 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3116
3117 VmaPoolCreateInfo poolCreateInfo = {};
3118 poolCreateInfo.blockSize = BUF_SIZE * 10;
3119 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3120 poolCreateInfo.maxBlockCount = 1;
3121 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003122 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003123
3124 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3125 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003126 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003127
3128 // Create one persistently mapped buffer to keep memory of this block mapped,
3129 // so that pointer to mapped data will remain (more or less...) valid even
3130 // after destruction of other allocations.
3131
3132 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3133 VkBuffer firstBuf;
3134 VmaAllocation firstAlloc;
3135 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003136 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003137
3138 // Test buffers.
3139
3140 for(uint32_t i = 0; i < 2; ++i)
3141 {
3142 const bool persistentlyMapped = i == 0;
3143 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3144 VkBuffer buf;
3145 VmaAllocation alloc;
3146 VmaAllocationInfo allocInfo;
3147 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003148 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003149
3150 void* pMappedData;
3151 if(!persistentlyMapped)
3152 {
3153 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003154 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003155 }
3156 else
3157 {
3158 pMappedData = allocInfo.pMappedData;
3159 }
3160
3161 // Validate initialized content
3162 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003163 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003164
3165 if(!persistentlyMapped)
3166 {
3167 vmaUnmapMemory(g_hAllocator, alloc);
3168 }
3169
3170 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3171
3172 // Validate freed content
3173 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003174 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003175 }
3176
3177 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3178 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3179}
3180
Adam Sawickib8333fb2018-03-13 16:15:53 +01003181static void TestPool_Benchmark(
3182 PoolTestResult& outResult,
3183 const PoolTestConfig& config)
3184{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003185 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003186
3187 RandomNumberGenerator mainRand{config.RandSeed};
3188
3189 uint32_t allocationSizeProbabilitySum = std::accumulate(
3190 config.AllocationSizes.begin(),
3191 config.AllocationSizes.end(),
3192 0u,
3193 [](uint32_t sum, const AllocationSize& allocSize) {
3194 return sum + allocSize.Probability;
3195 });
3196
3197 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3198 bufferInfo.size = 256; // Whatever.
3199 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3200
3201 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3202 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3203 imageInfo.extent.width = 256; // Whatever.
3204 imageInfo.extent.height = 256; // Whatever.
3205 imageInfo.extent.depth = 1;
3206 imageInfo.mipLevels = 1;
3207 imageInfo.arrayLayers = 1;
3208 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3209 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3210 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3211 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3212 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3213
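    // Create a throwaway buffer and image only to query their memory requirements, so that
    // a single memory type compatible with the requested resources can be chosen for the pool.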
3214 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3215 {
3216 VkBuffer dummyBuffer;
3217 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003218 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003219
3220 VkMemoryRequirements memReq;
3221 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3222 bufferMemoryTypeBits = memReq.memoryTypeBits;
3223
3224 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3225 }
3226
3227 uint32_t imageMemoryTypeBits = UINT32_MAX;
3228 {
3229 VkImage dummyImage;
3230 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003231 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003232
3233 VkMemoryRequirements memReq;
3234 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3235 imageMemoryTypeBits = memReq.memoryTypeBits;
3236
3237 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3238 }
3239
3240 uint32_t memoryTypeBits = 0;
3241 if(config.UsesBuffers() && config.UsesImages())
3242 {
3243 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3244 if(memoryTypeBits == 0)
3245 {
3246 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3247 return;
3248 }
3249 }
3250 else if(config.UsesBuffers())
3251 memoryTypeBits = bufferMemoryTypeBits;
3252 else if(config.UsesImages())
3253 memoryTypeBits = imageMemoryTypeBits;
3254 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003255 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003256
3257 VmaPoolCreateInfo poolCreateInfo = {};
3258 poolCreateInfo.memoryTypeIndex = 0;
3259 poolCreateInfo.minBlockCount = 1;
3260 poolCreateInfo.maxBlockCount = 1;
3261 poolCreateInfo.blockSize = config.PoolSize;
3262 poolCreateInfo.frameInUseCount = 1;
3263
3264 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3265 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3266 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3267
3268 VmaPool pool;
3269 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003270 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003271
3272 // Start time measurement - after creating pool and initializing data structures.
3273 time_point timeBeg = std::chrono::high_resolution_clock::now();
3274
3275 ////////////////////////////////////////////////////////////////////////////////
3276 // ThreadProc
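    // Each worker thread simulates config.FrameCount frames on its own slice of items:
    // per frame it moves random items between the used and unused sets, touches every used
    // allocation (allocating it if not created yet, recreating it if lost), and accumulates
    // timing and lost/failed-allocation statistics into outThreadResult.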
3277 auto ThreadProc = [&](
3278 PoolTestThreadResult* outThreadResult,
3279 uint32_t randSeed,
3280 HANDLE frameStartEvent,
3281 HANDLE frameEndEvent) -> void
3282 {
3283 RandomNumberGenerator threadRand{randSeed};
3284
3285 outThreadResult->AllocationTimeMin = duration::max();
3286 outThreadResult->AllocationTimeSum = duration::zero();
3287 outThreadResult->AllocationTimeMax = duration::min();
3288 outThreadResult->DeallocationTimeMin = duration::max();
3289 outThreadResult->DeallocationTimeSum = duration::zero();
3290 outThreadResult->DeallocationTimeMax = duration::min();
3291 outThreadResult->AllocationCount = 0;
3292 outThreadResult->DeallocationCount = 0;
3293 outThreadResult->LostAllocationCount = 0;
3294 outThreadResult->LostAllocationTotalSize = 0;
3295 outThreadResult->FailedAllocationCount = 0;
3296 outThreadResult->FailedAllocationTotalSize = 0;
3297
3298 struct Item
3299 {
3300 VkDeviceSize BufferSize;
3301 VkExtent2D ImageSize;
3302 VkBuffer Buf;
3303 VkImage Image;
3304 VmaAllocation Alloc;
3305
3306 VkDeviceSize CalcSizeBytes() const
3307 {
3308 return BufferSize +
3309 ImageSize.width * ImageSize.height * 4;
3310 }
3311 };
3312 std::vector<Item> unusedItems, usedItems;
3313
3314 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3315
3316 // Create all items - all unused, not yet allocated.
3317 for(size_t i = 0; i < threadTotalItemCount; ++i)
3318 {
3319 Item item = {};
3320
3321 uint32_t allocSizeIndex = 0;
3322 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3323 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3324 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3325
3326 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3327 if(allocSize.BufferSizeMax > 0)
3328 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003329 TEST(allocSize.BufferSizeMin > 0);
3330 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003331 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3332 item.BufferSize = allocSize.BufferSizeMin;
3333 else
3334 {
3335 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3336 item.BufferSize = item.BufferSize / 16 * 16;
3337 }
3338 }
3339 else
3340 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003341 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003342 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3343 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3344 else
3345 {
3346 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3347 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3348 }
3349 }
3350
3351 unusedItems.push_back(item);
3352 }
3353
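        // Allocations are created with CAN_BECOME_LOST + CAN_MAKE_OTHER_LOST so the
        // fixed-size pool (maxBlockCount = 1, frameInUseCount = 1) can recycle stale
        // allocations instead of failing once it fills up.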
3354 auto Allocate = [&](Item& item) -> VkResult
3355 {
3356 VmaAllocationCreateInfo allocCreateInfo = {};
3357 allocCreateInfo.pool = pool;
3358 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3359 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3360
3361 if(item.BufferSize)
3362 {
3363 bufferInfo.size = item.BufferSize;
3364 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3365 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3366 }
3367 else
3368 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003369 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003370
3371 imageInfo.extent.width = item.ImageSize.width;
3372 imageInfo.extent.height = item.ImageSize.height;
3373 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3374 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3375 }
3376 };
3377
3378 ////////////////////////////////////////////////////////////////////////////////
3379 // Frames
3380 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3381 {
3382 WaitForSingleObject(frameStartEvent, INFINITE);
3383
3384 // Always make some percent of used bufs unused, to choose different used ones.
3385 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3386 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3387 {
3388 size_t index = threadRand.Generate() % usedItems.size();
3389 unusedItems.push_back(usedItems[index]);
3390 usedItems.erase(usedItems.begin() + index);
3391 }
3392
3393 // Determine which bufs we want to use in this frame.
3394 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3395 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003396 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003397 // Move some used to unused.
3398 while(usedBufCount < usedItems.size())
3399 {
3400 size_t index = threadRand.Generate() % usedItems.size();
3401 unusedItems.push_back(usedItems[index]);
3402 usedItems.erase(usedItems.begin() + index);
3403 }
3404 // Move some unused to used.
3405 while(usedBufCount > usedItems.size())
3406 {
3407 size_t index = threadRand.Generate() % unusedItems.size();
3408 usedItems.push_back(unusedItems[index]);
3409 unusedItems.erase(unusedItems.begin() + index);
3410 }
3411
3412 uint32_t touchExistingCount = 0;
3413 uint32_t touchLostCount = 0;
3414 uint32_t createSucceededCount = 0;
3415 uint32_t createFailedCount = 0;
3416
3417            // Touch all used bufs. Allocate those not yet created; destroy and recreate those found lost.
3418 for(size_t i = 0; i < usedItems.size(); ++i)
3419 {
3420 Item& item = usedItems[i];
3421 // Not yet created.
3422 if(item.Alloc == VK_NULL_HANDLE)
3423 {
3424 res = Allocate(item);
3425 ++outThreadResult->AllocationCount;
3426 if(res != VK_SUCCESS)
3427 {
3428 item.Alloc = VK_NULL_HANDLE;
3429 item.Buf = VK_NULL_HANDLE;
3430 ++outThreadResult->FailedAllocationCount;
3431 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3432 ++createFailedCount;
3433 }
3434 else
3435 ++createSucceededCount;
3436 }
3437 else
3438 {
3439 // Touch.
3440 VmaAllocationInfo allocInfo;
3441 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3442 // Lost.
3443 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3444 {
3445 ++touchLostCount;
3446
3447 // Destroy.
3448 {
3449 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3450 if(item.Buf)
3451 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3452 else
3453 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3454 ++outThreadResult->DeallocationCount;
3455 }
3456 item.Alloc = VK_NULL_HANDLE;
3457 item.Buf = VK_NULL_HANDLE;
3458
3459 ++outThreadResult->LostAllocationCount;
3460 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3461
3462 // Recreate.
3463 res = Allocate(item);
3464 ++outThreadResult->AllocationCount;
3465 // Creation failed.
3466 if(res != VK_SUCCESS)
3467 {
3468 ++outThreadResult->FailedAllocationCount;
3469 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3470 ++createFailedCount;
3471 }
3472 else
3473 ++createSucceededCount;
3474 }
3475 else
3476 ++touchExistingCount;
3477 }
3478 }
3479
3480 /*
3481 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3482 randSeed, frameIndex,
3483 touchExistingCount, touchLostCount,
3484 createSucceededCount, createFailedCount);
3485 */
3486
3487 SetEvent(frameEndEvent);
3488 }
3489
3490 // Free all remaining items.
3491 for(size_t i = usedItems.size(); i--; )
3492 {
3493 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3494 if(usedItems[i].Buf)
3495 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3496 else
3497 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3498 ++outThreadResult->DeallocationCount;
3499 }
3500 for(size_t i = unusedItems.size(); i--; )
3501 {
3502            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3503 if(unusedItems[i].Buf)
3504 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3505 else
3506 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3507 ++outThreadResult->DeallocationCount;
3508 }
3509 };
3510
3511 // Launch threads.
3512 uint32_t threadRandSeed = mainRand.Generate();
3513 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3514 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3515 std::vector<std::thread> bkgThreads;
3516 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3517 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3518 {
3519 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3520 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3521 bkgThreads.emplace_back(std::bind(
3522 ThreadProc,
3523 &threadResults[threadIndex],
3524 threadRandSeed + threadIndex,
3525 frameStartEvents[threadIndex],
3526 frameEndEvents[threadIndex]));
3527 }
3528
3529 // Execute frames.
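    // The main thread drives the frames: it advances the allocator's frame index, signals
    // every worker's frame-start event, then waits for all frame-end events (hence
    // ThreadCount must not exceed MAXIMUM_WAIT_OBJECTS for a single WaitForMultipleObjects call).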
Adam Sawickib8d34d52018-10-03 17:41:20 +02003530 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003531 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3532 {
3533 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3534 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3535 SetEvent(frameStartEvents[threadIndex]);
3536 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3537 }
3538
3539    // Wait for all threads to finish.
3540 for(size_t i = 0; i < bkgThreads.size(); ++i)
3541 {
3542 bkgThreads[i].join();
3543 CloseHandle(frameEndEvents[i]);
3544 CloseHandle(frameStartEvents[i]);
3545 }
3546 bkgThreads.clear();
3547
3548 // Finish time measurement - before destroying pool.
3549 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3550
3551 vmaDestroyPool(g_hAllocator, pool);
3552
3553 outResult.AllocationTimeMin = duration::max();
3554 outResult.AllocationTimeAvg = duration::zero();
3555 outResult.AllocationTimeMax = duration::min();
3556 outResult.DeallocationTimeMin = duration::max();
3557 outResult.DeallocationTimeAvg = duration::zero();
3558 outResult.DeallocationTimeMax = duration::min();
3559 outResult.LostAllocationCount = 0;
3560 outResult.LostAllocationTotalSize = 0;
3561 outResult.FailedAllocationCount = 0;
3562 outResult.FailedAllocationTotalSize = 0;
3563 size_t allocationCount = 0;
3564 size_t deallocationCount = 0;
3565 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3566 {
3567 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3568 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3569 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3570 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3571 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3572 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3573 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3574 allocationCount += threadResult.AllocationCount;
3575 deallocationCount += threadResult.DeallocationCount;
3576 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3577 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3578 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3579 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3580 }
3581 if(allocationCount)
3582 outResult.AllocationTimeAvg /= allocationCount;
3583 if(deallocationCount)
3584 outResult.DeallocationTimeAvg /= deallocationCount;
3585}
3586
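// Returns true if the address ranges [ptr1, ptr1+size1) and [ptr2, ptr2+size2) overlap.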
3587static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3588{
3589 if(ptr1 < ptr2)
3590 return ptr1 + size1 > ptr2;
3591 else if(ptr2 < ptr1)
3592 return ptr2 + size2 > ptr1;
3593 else
3594 return true;
3595}
3596
3597static void TestMapping()
3598{
3599 wprintf(L"Testing mapping...\n");
3600
3601 VkResult res;
3602 uint32_t memTypeIndex = UINT32_MAX;
3603
3604 enum TEST
3605 {
3606 TEST_NORMAL,
3607 TEST_POOL,
3608 TEST_DEDICATED,
3609 TEST_COUNT
3610 };
3611 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3612 {
3613 VmaPool pool = nullptr;
3614 if(testIndex == TEST_POOL)
3615 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003616 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003617 VmaPoolCreateInfo poolInfo = {};
3618 poolInfo.memoryTypeIndex = memTypeIndex;
3619 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003620 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003621 }
3622
3623 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3624 bufInfo.size = 0x10000;
3625 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3626
3627 VmaAllocationCreateInfo allocCreateInfo = {};
3628 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3629 allocCreateInfo.pool = pool;
3630 if(testIndex == TEST_DEDICATED)
3631 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3632
3633 VmaAllocationInfo allocInfo;
3634
3635 // Mapped manually
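        // vmaMapMemory / vmaUnmapMemory are reference-counted per allocation: mapping the
        // same allocation twice returns the same pointer and requires two unmaps before
        // pMappedData becomes null again, which the sequence below verifies.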
3636
3637        // Create 2 buffers. bufferInfos[2] is filled later with the persistently mapped buffer.
3638 BufferInfo bufferInfos[3];
3639 for(size_t i = 0; i < 2; ++i)
3640 {
3641 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3642 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003643 TEST(res == VK_SUCCESS);
3644 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003645 memTypeIndex = allocInfo.memoryType;
3646 }
3647
3648 // Map buffer 0.
3649 char* data00 = nullptr;
3650 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003651 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003652 data00[0xFFFF] = data00[0];
3653
3654 // Map buffer 0 second time.
3655 char* data01 = nullptr;
3656 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003657 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003658
3659 // Map buffer 1.
3660 char* data1 = nullptr;
3661 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003662 TEST(res == VK_SUCCESS && data1 != nullptr);
3663 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003664 data1[0xFFFF] = data1[0];
3665
3666 // Unmap buffer 0 two times.
3667 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3668 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3669 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003670 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003671
3672 // Unmap buffer 1.
3673 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3674 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003675 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003676
3677 // Create 3rd buffer - persistently mapped.
3678 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3679 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3680 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003681 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003682
3683 // Map buffer 2.
3684 char* data2 = nullptr;
3685 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003686 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003687 data2[0xFFFF] = data2[0];
3688
3689 // Unmap buffer 2.
3690 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3691 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003692 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003693
3694 // Destroy all buffers.
3695 for(size_t i = 3; i--; )
3696 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3697
3698 vmaDestroyPool(g_hAllocator, pool);
3699 }
3700}
3701
3702static void TestMappingMultithreaded()
3703{
3704 wprintf(L"Testing mapping multithreaded...\n");
3705
3706 static const uint32_t threadCount = 16;
3707 static const uint32_t bufferCount = 1024;
3708 static const uint32_t threadBufferCount = bufferCount / threadCount;
3709
3710 VkResult res;
3711 volatile uint32_t memTypeIndex = UINT32_MAX;
3712
3713 enum TEST
3714 {
3715 TEST_NORMAL,
3716 TEST_POOL,
3717 TEST_DEDICATED,
3718 TEST_COUNT
3719 };
3720 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3721 {
3722 VmaPool pool = nullptr;
3723 if(testIndex == TEST_POOL)
3724 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003725 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003726 VmaPoolCreateInfo poolInfo = {};
3727 poolInfo.memoryTypeIndex = memTypeIndex;
3728 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003729 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003730 }
3731
3732 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3733 bufCreateInfo.size = 0x10000;
3734 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3735
3736 VmaAllocationCreateInfo allocCreateInfo = {};
3737 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3738 allocCreateInfo.pool = pool;
3739 if(testIndex == TEST_DEDICATED)
3740 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3741
3742 std::thread threads[threadCount];
3743 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3744 {
3745 threads[threadIndex] = std::thread([=, &memTypeIndex](){
3746 // ======== THREAD FUNCTION ========
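                // Each thread creates its own share of buffers, picks a random mapping mode
                // per buffer, and checks that pMappedData reported by vmaGetAllocationInfo
                // stays consistent with its map/unmap calls while other threads map and
                // unmap concurrently.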
3747
3748 RandomNumberGenerator rand{threadIndex};
3749
3750 enum class MODE
3751 {
3752 // Don't map this buffer at all.
3753 DONT_MAP,
3754 // Map and quickly unmap.
3755 MAP_FOR_MOMENT,
3756 // Map and unmap before destruction.
3757 MAP_FOR_LONGER,
3758 // Map two times. Quickly unmap, second unmap before destruction.
3759 MAP_TWO_TIMES,
3760 // Create this buffer as persistently mapped.
3761 PERSISTENTLY_MAPPED,
3762 COUNT
3763 };
3764 std::vector<BufferInfo> bufInfos{threadBufferCount};
3765 std::vector<MODE> bufModes{threadBufferCount};
3766
3767 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
3768 {
3769 BufferInfo& bufInfo = bufInfos[bufferIndex];
3770 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
3771 bufModes[bufferIndex] = mode;
3772
3773 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
3774 if(mode == MODE::PERSISTENTLY_MAPPED)
3775 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3776
3777 VmaAllocationInfo allocInfo;
3778 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
3779 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003780 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003781
3782 if(memTypeIndex == UINT32_MAX)
3783 memTypeIndex = allocInfo.memoryType;
3784
3785 char* data = nullptr;
3786
3787 if(mode == MODE::PERSISTENTLY_MAPPED)
3788 {
3789 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003790 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003791 }
3792 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
3793 mode == MODE::MAP_TWO_TIMES)
3794 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003795 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003796 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003797 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003798
3799 if(mode == MODE::MAP_TWO_TIMES)
3800 {
3801 char* data2 = nullptr;
3802 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003803 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003804 }
3805 }
3806 else if(mode == MODE::DONT_MAP)
3807 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003808 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003809 }
3810 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003811 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003812
3813 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
3814 if(data)
3815 data[0xFFFF] = data[0];
3816
3817 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
3818 {
3819 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
3820
3821 VmaAllocationInfo allocInfo;
3822 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
3823 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02003824 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003825 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003826 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003827 }
3828
3829 switch(rand.Generate() % 3)
3830 {
3831 case 0: Sleep(0); break; // Yield.
3832 case 1: Sleep(10); break; // 10 ms
3833 // default: No sleep.
3834 }
3835
3836 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
3837 if(data)
3838 data[0xFFFF] = data[0];
3839 }
3840
3841 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
3842 {
3843 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
3844 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
3845 {
3846 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
3847
3848 VmaAllocationInfo allocInfo;
3849 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003850 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003851 }
3852
3853 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
3854 }
3855 });
3856 }
3857
3858 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
3859 threads[threadIndex].join();
3860
3861 vmaDestroyPool(g_hAllocator, pool);
3862 }
3863}
3864
3865static void WriteMainTestResultHeader(FILE* file)
3866{
3867 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02003868 "Code,Time,"
3869 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003870 "Total Time (us),"
3871 "Allocation Time Min (us),"
3872 "Allocation Time Avg (us),"
3873 "Allocation Time Max (us),"
3874 "Deallocation Time Min (us),"
3875 "Deallocation Time Avg (us),"
3876 "Deallocation Time Max (us),"
3877 "Total Memory Allocated (B),"
3878 "Free Range Size Avg (B),"
3879 "Free Range Size Max (B)\n");
3880}
3881
3882static void WriteMainTestResult(
3883 FILE* file,
3884 const char* codeDescription,
3885 const char* testDescription,
3886 const Config& config, const Result& result)
3887{
3888 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3889 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3890 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3891 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3892 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3893 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3894 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3895
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003896 std::string currTime;
3897 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003898
3899 fprintf(file,
3900 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003901 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
3902 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003903 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02003904 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01003905 totalTimeSeconds * 1e6f,
3906 allocationTimeMinSeconds * 1e6f,
3907 allocationTimeAvgSeconds * 1e6f,
3908 allocationTimeMaxSeconds * 1e6f,
3909 deallocationTimeMinSeconds * 1e6f,
3910 deallocationTimeAvgSeconds * 1e6f,
3911 deallocationTimeMaxSeconds * 1e6f,
3912 result.TotalMemoryAllocated,
3913 result.FreeRangeSizeAvg,
3914 result.FreeRangeSizeMax);
3915}
3916
3917static void WritePoolTestResultHeader(FILE* file)
3918{
3919 fprintf(file,
3920 "Code,Test,Time,"
3921 "Config,"
3922 "Total Time (us),"
3923 "Allocation Time Min (us),"
3924 "Allocation Time Avg (us),"
3925 "Allocation Time Max (us),"
3926 "Deallocation Time Min (us),"
3927 "Deallocation Time Avg (us),"
3928 "Deallocation Time Max (us),"
3929 "Lost Allocation Count,"
3930 "Lost Allocation Total Size (B),"
3931 "Failed Allocation Count,"
3932 "Failed Allocation Total Size (B)\n");
3933}
3934
3935static void WritePoolTestResult(
3936 FILE* file,
3937 const char* codeDescription,
3938 const char* testDescription,
3939 const PoolTestConfig& config,
3940 const PoolTestResult& result)
3941{
3942 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3943 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3944 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3945 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3946 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3947 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3948 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3949
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003950 std::string currTime;
3951 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003952
3953 fprintf(file,
3954 "%s,%s,%s,"
3955 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
3956 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
3957 // General
3958 codeDescription,
3959 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003960 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01003961 // Config
3962 config.ThreadCount,
3963 (unsigned long long)config.PoolSize,
3964 config.FrameCount,
3965 config.TotalItemCount,
3966 config.UsedItemCountMin,
3967 config.UsedItemCountMax,
3968 config.ItemsToMakeUnusedPercent,
3969 // Results
3970 totalTimeSeconds * 1e6f,
3971 allocationTimeMinSeconds * 1e6f,
3972 allocationTimeAvgSeconds * 1e6f,
3973 allocationTimeMaxSeconds * 1e6f,
3974 deallocationTimeMinSeconds * 1e6f,
3975 deallocationTimeAvgSeconds * 1e6f,
3976 deallocationTimeMaxSeconds * 1e6f,
3977 result.LostAllocationCount,
3978 result.LostAllocationTotalSize,
3979 result.FailedAllocationCount,
3980 result.FailedAllocationTotalSize);
3981}
3982
3983static void PerformCustomMainTest(FILE* file)
3984{
3985 Config config{};
3986 config.RandSeed = 65735476;
3987 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
3988 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
3989 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
3990 config.FreeOrder = FREE_ORDER::FORWARD;
3991 config.ThreadCount = 16;
3992 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02003993 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01003994
3995 // Buffers
3996 //config.AllocationSizes.push_back({4, 16, 1024});
3997 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
3998
3999 // Images
4000 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4001 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4002
4003 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4004 config.AdditionalOperationCount = 1024;
4005
4006 Result result{};
4007 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004008 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004009 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4010}
4011
4012static void PerformCustomPoolTest(FILE* file)
4013{
4014 PoolTestConfig config;
4015 config.PoolSize = 100 * 1024 * 1024;
4016 config.RandSeed = 2345764;
4017 config.ThreadCount = 1;
4018 config.FrameCount = 200;
4019 config.ItemsToMakeUnusedPercent = 2;
4020
4021 AllocationSize allocSize = {};
4022 allocSize.BufferSizeMin = 1024;
4023 allocSize.BufferSizeMax = 1024 * 1024;
4024 allocSize.Probability = 1;
4025 config.AllocationSizes.push_back(allocSize);
4026
4027 allocSize.BufferSizeMin = 0;
4028 allocSize.BufferSizeMax = 0;
4029 allocSize.ImageSizeMin = 128;
4030 allocSize.ImageSizeMax = 1024;
4031 allocSize.Probability = 1;
4032 config.AllocationSizes.push_back(allocSize);
4033
4034 config.PoolSize = config.CalcAvgResourceSize() * 200;
4035 config.UsedItemCountMax = 160;
4036 config.TotalItemCount = config.UsedItemCountMax * 10;
4037 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4038
4039 g_MemoryAliasingWarningEnabled = false;
4040 PoolTestResult result = {};
4041 TestPool_Benchmark(result, config);
4042 g_MemoryAliasingWarningEnabled = true;
4043
4044 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4045}
4046
Adam Sawickib8333fb2018-03-13 16:15:53 +01004047static void PerformMainTests(FILE* file)
4048{
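    // Sweeps a cartesian product of configurations: thread count and shared-allocation
    // probability, buffers vs. images, small vs. large sizes, varying vs. constant sizes,
    // how much is allocated up front, and allocation strategy. Each combination runs
    // MainTest repeatCount times and is written as one CSV row.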
4049 uint32_t repeatCount = 1;
4050 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4051
4052 Config config{};
4053 config.RandSeed = 65735476;
4054 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4055 config.FreeOrder = FREE_ORDER::FORWARD;
4056
4057 size_t threadCountCount = 1;
4058 switch(ConfigType)
4059 {
4060 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4061 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4062 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4063 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4064 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4065 default: assert(0);
4066 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004067
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004068 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004069
Adam Sawickib8333fb2018-03-13 16:15:53 +01004070 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4071 {
4072 std::string desc1;
4073
4074 switch(threadCountIndex)
4075 {
4076 case 0:
4077 desc1 += "1_thread";
4078 config.ThreadCount = 1;
4079 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4080 break;
4081 case 1:
4082 desc1 += "16_threads+0%_common";
4083 config.ThreadCount = 16;
4084 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4085 break;
4086 case 2:
4087 desc1 += "16_threads+50%_common";
4088 config.ThreadCount = 16;
4089 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4090 break;
4091 case 3:
4092 desc1 += "16_threads+100%_common";
4093 config.ThreadCount = 16;
4094 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4095 break;
4096 case 4:
4097 desc1 += "2_threads+0%_common";
4098 config.ThreadCount = 2;
4099 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4100 break;
4101 case 5:
4102 desc1 += "2_threads+50%_common";
4103 config.ThreadCount = 2;
4104 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4105 break;
4106 case 6:
4107 desc1 += "2_threads+100%_common";
4108 config.ThreadCount = 2;
4109 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4110 break;
4111 default:
4112 assert(0);
4113 }
4114
4115 // 0 = buffers, 1 = images, 2 = buffers and images
4116 size_t buffersVsImagesCount = 2;
4117 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4118 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4119 {
4120 std::string desc2 = desc1;
4121 switch(buffersVsImagesIndex)
4122 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004123 case 0: desc2 += ",Buffers"; break;
4124 case 1: desc2 += ",Images"; break;
4125 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004126 default: assert(0);
4127 }
4128
4129 // 0 = small, 1 = large, 2 = small and large
4130 size_t smallVsLargeCount = 2;
4131 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4132 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4133 {
4134 std::string desc3 = desc2;
4135 switch(smallVsLargeIndex)
4136 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004137 case 0: desc3 += ",Small"; break;
4138 case 1: desc3 += ",Large"; break;
4139 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004140 default: assert(0);
4141 }
4142
4143 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4144 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4145 else
4146 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4147
4148 // 0 = varying sizes min...max, 1 = set of constant sizes
4149 size_t constantSizesCount = 1;
4150 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4151 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4152 {
4153 std::string desc4 = desc3;
4154 switch(constantSizesIndex)
4155 {
4156 case 0: desc4 += " Varying_sizes"; break;
4157 case 1: desc4 += " Constant_sizes"; break;
4158 default: assert(0);
4159 }
4160
4161 config.AllocationSizes.clear();
4162 // Buffers present
4163 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4164 {
4165 // Small
4166 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4167 {
4168 // Varying size
4169 if(constantSizesIndex == 0)
4170 config.AllocationSizes.push_back({4, 16, 1024});
4171 // Constant sizes
4172 else
4173 {
4174 config.AllocationSizes.push_back({1, 16, 16});
4175 config.AllocationSizes.push_back({1, 64, 64});
4176 config.AllocationSizes.push_back({1, 256, 256});
4177 config.AllocationSizes.push_back({1, 1024, 1024});
4178 }
4179 }
4180 // Large
4181 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4182 {
4183 // Varying size
4184 if(constantSizesIndex == 0)
4185 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4186 // Constant sizes
4187 else
4188 {
4189 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4190 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4191 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4192 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4193 }
4194 }
4195 }
4196 // Images present
4197 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4198 {
4199 // Small
4200 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4201 {
4202 // Varying size
4203 if(constantSizesIndex == 0)
4204 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4205 // Constant sizes
4206 else
4207 {
4208 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4209 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4210 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4211 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4212 }
4213 }
4214 // Large
4215 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4216 {
4217 // Varying size
4218 if(constantSizesIndex == 0)
4219 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4220 // Constant sizes
4221 else
4222 {
4223 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4224 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4225 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4226 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4227 }
4228 }
4229 }
4230
4231                    // 0 = allocate 100% up front (additional_operations = 0); 1 = 50%, 2 = 5%, 3 = 95%, each followed by additional operations.
4232 size_t beginBytesToAllocateCount = 1;
4233 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4234 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4235 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4236 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4237 {
4238 std::string desc5 = desc4;
4239
4240 switch(beginBytesToAllocateIndex)
4241 {
4242 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004243 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004244 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4245 config.AdditionalOperationCount = 0;
4246 break;
4247 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004248 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004249 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4250 config.AdditionalOperationCount = 1024;
4251 break;
4252 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004253 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004254 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4255 config.AdditionalOperationCount = 1024;
4256 break;
4257 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004258 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004259 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4260 config.AdditionalOperationCount = 1024;
4261 break;
4262 default:
4263 assert(0);
4264 }
4265
Adam Sawicki0667e332018-08-24 17:26:44 +02004266 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004267 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004268 std::string desc6 = desc5;
4269 switch(strategyIndex)
4270 {
4271 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004272 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004273 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4274 break;
4275 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004276 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004277 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4278 break;
4279 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004280 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004281 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4282 break;
4283 default:
4284 assert(0);
4285 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004286
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004287 desc6 += ',';
4288 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004289
4290 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004291
4292 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4293 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004294 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004295
4296 Result result{};
4297 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004298 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004299 if(file)
4300 {
4301 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4302 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004303 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004304 }
4305 }
4306 }
4307 }
4308 }
4309 }
4310}
4311
4312static void PerformPoolTests(FILE* file)
4313{
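    // Same structure as PerformMainTests, but exercising a custom VmaPool: the pool size is
    // derived from the average resource size times AVG_RESOURCES_PER_POOL, and the
    // subscription modes under- or over-commit that budget, so over-committed runs are
    // expected to produce lost or failed allocations.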
4314 const size_t AVG_RESOURCES_PER_POOL = 300;
4315
4316 uint32_t repeatCount = 1;
4317 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4318
4319 PoolTestConfig config{};
4320 config.RandSeed = 2346343;
4321 config.FrameCount = 200;
4322 config.ItemsToMakeUnusedPercent = 2;
4323
4324 size_t threadCountCount = 1;
4325 switch(ConfigType)
4326 {
4327 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4328 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4329 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4330 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4331 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4332 default: assert(0);
4333 }
4334 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4335 {
4336 std::string desc1;
4337
4338 switch(threadCountIndex)
4339 {
4340 case 0:
4341 desc1 += "1_thread";
4342 config.ThreadCount = 1;
4343 break;
4344 case 1:
4345 desc1 += "16_threads";
4346 config.ThreadCount = 16;
4347 break;
4348 case 2:
4349 desc1 += "2_threads";
4350 config.ThreadCount = 2;
4351 break;
4352 default:
4353 assert(0);
4354 }
4355
4356 // 0 = buffers, 1 = images, 2 = buffers and images
4357 size_t buffersVsImagesCount = 2;
4358 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4359 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4360 {
4361 std::string desc2 = desc1;
4362 switch(buffersVsImagesIndex)
4363 {
4364 case 0: desc2 += " Buffers"; break;
4365 case 1: desc2 += " Images"; break;
4366 case 2: desc2 += " Buffers+Images"; break;
4367 default: assert(0);
4368 }
4369
4370 // 0 = small, 1 = large, 2 = small and large
4371 size_t smallVsLargeCount = 2;
4372 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4373 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4374 {
4375 std::string desc3 = desc2;
4376 switch(smallVsLargeIndex)
4377 {
4378 case 0: desc3 += " Small"; break;
4379 case 1: desc3 += " Large"; break;
4380 case 2: desc3 += " Small+Large"; break;
4381 default: assert(0);
4382 }
4383
4384 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4385 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4386 else
4387 config.PoolSize = 4ull * 1024 * 1024;
4388
4389 // 0 = varying sizes min...max, 1 = set of constant sizes
4390 size_t constantSizesCount = 1;
4391 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4392 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4393 {
4394 std::string desc4 = desc3;
4395 switch(constantSizesIndex)
4396 {
4397 case 0: desc4 += " Varying_sizes"; break;
4398 case 1: desc4 += " Constant_sizes"; break;
4399 default: assert(0);
4400 }
4401
4402 config.AllocationSizes.clear();
4403 // Buffers present
4404 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4405 {
4406 // Small
4407 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4408 {
4409 // Varying size
4410 if(constantSizesIndex == 0)
4411 config.AllocationSizes.push_back({4, 16, 1024});
4412 // Constant sizes
4413 else
4414 {
4415 config.AllocationSizes.push_back({1, 16, 16});
4416 config.AllocationSizes.push_back({1, 64, 64});
4417 config.AllocationSizes.push_back({1, 256, 256});
4418 config.AllocationSizes.push_back({1, 1024, 1024});
4419 }
4420 }
4421 // Large
4422 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4423 {
4424 // Varying size
4425 if(constantSizesIndex == 0)
4426 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4427 // Constant sizes
4428 else
4429 {
4430 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4431 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4432 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4433 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4434 }
4435 }
4436 }
4437 // Images present
4438 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4439 {
4440 // Small
4441 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4442 {
4443 // Varying size
4444 if(constantSizesIndex == 0)
4445 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4446 // Constant sizes
4447 else
4448 {
4449 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4450 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4451 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4452 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4453 }
4454 }
4455 // Large
4456 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4457 {
4458 // Varying size
4459 if(constantSizesIndex == 0)
4460 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4461 // Constant sizes
4462 else
4463 {
4464 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4465 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4466 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4467 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4468 }
4469 }
4470 }
4471
4472 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4473 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4474
4475 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount = 0; // Initialized so the value is defined even if assert(0) below is compiled out.
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL:   subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE:   subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
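                        // (The test keeps 5x more items than are ever in use at once, and the live set
                        // varies between 80% and 100% of the subscription target.)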

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}

static void BasicTestBuddyAllocator()
{
    wprintf(L"Basic test buddy allocator\n");

    RandomNumberGenerator rand{76543};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Arbitrary; used only to query a compatible memory type index below.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Deliberately adding 1023 to test usable size smaller than memory block size.
    poolCreateInfo.blockSize = 1024 * 1024 + 1023;
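    // (Per the comment above, the buddy algorithm presumably rounds the usable size down to a
    // power of two, so the extra 1023 bytes are expected to remain unused.)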
    poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
    //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    BufferInfo newBufInfo;
    VmaAllocationInfo allocInfo;

    bufCreateInfo.size = 1024 * 256;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 512;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 128;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test very small allocation, smaller than minimum node size.
    bufCreateInfo.size = 1;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test some small allocation with alignment requirement.
    {
        VkMemoryRequirements memReq;
        memReq.alignment = 256;
        memReq.memoryTypeBits = UINT32_MAX;
        memReq.size = 32;

        newBufInfo.Buffer = VK_NULL_HANDLE;
        res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
            &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        TEST(allocInfo.offset % memReq.alignment == 0);
        bufInfo.push_back(newBufInfo);
    }
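    // (Presumably the buddy algorithm satisfies this without extra padding because nodes start at
    // offsets that are multiples of their power-of-two size; the TEST above is what verifies it.)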

    //SaveAllocatorStatsToFile(L"TEST.json");

    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    int DBG = 0; // Set breakpoint here to inspect `stats`.
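    // Suggested extra check (not part of the original test): every allocation made so far went into
    // this pool, so its allocation count should match the number of tracked buffers.
    //TEST(stats.allocationCount == bufInfo.size());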

    // Allocate enough new buffers (32 of up to 32 KB each, on top of the ~900 KB already allocated
    // above) to make sure the allocations spill into a second memory block.
    for(uint32_t i = 0; i < 32; ++i)
    {
        bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);
    }

    SaveAllocatorStatsToFile(L"BuddyTest01.json");

    // Destroy the buffers in random order.
    while(!bufInfo.empty())
    {
        const size_t indexToDestroy = rand.Generate() % bufInfo.size();
        const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
        vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
        bufInfo.erase(bufInfo.begin() + indexToDestroy);
    }

    vmaDestroyPool(g_hAllocator, pool);
}
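
// Illustrative sketch only, not called by any test: it shows the public VMA calls
// (vmaBuildStatsString / vmaFreeStatsString) that a utility like SaveAllocatorStatsToFile
// presumably wraps. The function name is made up for this example.
static void DumpAllocatorStatsToStdout()
{
    char* statsString = nullptr;
    // Build a JSON dump of the whole allocator, including a detailed map of every allocation.
    vmaBuildStatsString(g_hAllocator, &statsString, VK_TRUE);
    printf("%s\n", statsString);
    // The string is allocated by VMA and must be released with the matching call.
    vmaFreeStatsString(g_hAllocator, statsString);
}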

// Test the testing environment itself: upload known data to GPU-only buffers and validate
// that it reads back correctly.
static void TestGpuData()
{
    RandomNumberGenerator rand = { 53434 };

    std::vector<AllocInfo> allocInfo;

    for(size_t i = 0; i < 100; ++i)
    {
        AllocInfo info = {};

        info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
        info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

        VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
        TEST(res == VK_SUCCESS);

        info.m_StartValue = rand.Generate();

        allocInfo.push_back(std::move(info));
    }
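
    // (Assumption about helpers defined elsewhere in this test framework: UploadGpuData presumably
    // fills each buffer with a pattern seeded from m_StartValue, and ValidateGpuData reads the data
    // back to verify that the transfers worked.)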

    UploadGpuData(allocInfo.data(), allocInfo.size());

    ValidateGpuData(allocInfo.data(), allocInfo.size());

    DestroyAllAllocations(allocInfo);
}

void Test()
{
    wprintf(L"TESTING:\n");

    if(true)
    {
        // # Temporarily insert custom tests here.
        // While this block is enabled, only the tests below run and Test() returns early,
        // skipping the rest of the suite.
        // ########################################
        // ########################################

        TestDefragmentationGpu();
        TestDefragmentationSimple();
        TestDefragmentationFull();
        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMapping();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();

    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationGpu();

    // # Detailed tests
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}

#endif // #ifdef _WIN32