blob: d34c34f82a6ab4781f5ee8647f8b9d934f3e51e7 [file] [log] [blame]
Adam Sawickif1a793c2018-03-13 15:42:22 +01001#include "Tests.h"
2#include "VmaUsage.h"
3#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004#include <atomic>
5#include <thread>
6#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +01007
8#ifdef _WIN32
9
Adam Sawicki33d2ce72018-08-27 13:59:13 +020010static const char* CODE_DESCRIPTION = "Foo";
11
Adam Sawickif2975342018-10-16 13:49:02 +020012extern VkCommandBuffer g_hTemporaryCommandBuffer;
13void BeginSingleTimeCommands();
14void EndSingleTimeCommands();
15
// Preset controlling how heavy the test workload is (thread counts,
// operation counts, number of strategies exercised, etc.).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM = 0,
    CONFIG_TYPE_SMALL   = 1,
    CONFIG_TYPE_AVERAGE = 2,
    CONFIG_TYPE_LARGE   = 3,
    CONFIG_TYPE_MAXIMUM = 4,
    CONFIG_TYPE_COUNT   = 5 // Number of presets; not a valid preset itself.
};
24
// Active workload preset for this build. Swap in the commented-out line
// below for a heavier (LARGE) test run.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020027
// Order in which surviving allocations are destroyed at the end of a test.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Printable names, indexed by (uint32_t)FREE_ORDER; COUNT has no entry.
static const char* FREE_ORDER_NAMES[] = { "FORWARD", "BACKWARD", "RANDOM" };
35
Adam Sawicki80927152018-09-07 17:27:23 +020036// Copy of internal VmaAlgorithmToStr.
37static const char* AlgorithmToStr(uint32_t algorithm)
38{
39 switch(algorithm)
40 {
41 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
42 return "Linear";
43 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
44 return "Buddy";
45 case 0:
46 return "Default";
47 default:
48 assert(0);
49 return "";
50 }
51}
52
// One entry of a weighted, randomized allocation-size distribution.
// By convention (see GetNextAllocationSize / CalcAvgResourceSize):
// BufferSizeMax > 0 means this entry describes buffers, otherwise images.
struct AllocationSize
{
    // Relative weight of this entry when picking a random size.
    uint32_t Probability;
    // Buffer size range in bytes; ignored when describing images.
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    // Image width/height range in pixels; ignored when describing buffers.
    uint32_t ImageSizeMin, ImageSizeMax;
};
59
// Parameters for one MainTest() run.
struct Config
{
    // Seed for the main random number generator.
    uint32_t RandSeed;
    // Total bytes allocated up-front, split evenly across threads.
    VkDeviceSize BeginBytesToAllocate;
    // Number of random alloc/free operations after the initial phase (total, split across threads).
    uint32_t AdditionalOperationCount;
    // Per-run cap on allocated bytes during the additional-operations phase.
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    // Weighted distribution of buffer/image sizes to create.
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    // Percent chance [0..100) that an allocation goes to the shared cross-thread list.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    // Order in which surviving allocations are freed at the end.
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
73
// Aggregated timings and memory statistics produced by MainTest().
// The *Avg fields accumulate sums during the run and are divided by the
// allocation count at the end (see MainTest).
struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // usedBytes + unusedBytes as reported by vmaCalculateStats.
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};
82
83void TestDefragmentationSimple();
84void TestDefragmentationFull();
85
86struct PoolTestConfig
87{
88 uint32_t RandSeed;
89 uint32_t ThreadCount;
90 VkDeviceSize PoolSize;
91 uint32_t FrameCount;
92 uint32_t TotalItemCount;
93 // Range for number of items used in each frame.
94 uint32_t UsedItemCountMin, UsedItemCountMax;
95 // Percent of items to make unused, and possibly make some others used in each frame.
96 uint32_t ItemsToMakeUnusedPercent;
97 std::vector<AllocationSize> AllocationSizes;
98
99 VkDeviceSize CalcAvgResourceSize() const
100 {
101 uint32_t probabilitySum = 0;
102 VkDeviceSize sizeSum = 0;
103 for(size_t i = 0; i < AllocationSizes.size(); ++i)
104 {
105 const AllocationSize& allocSize = AllocationSizes[i];
106 if(allocSize.BufferSizeMax > 0)
107 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
108 else
109 {
110 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
111 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
112 }
113 probabilitySum += allocSize.Probability;
114 }
115 return sizeSum / probabilitySum;
116 }
117
118 bool UsesBuffers() const
119 {
120 for(size_t i = 0; i < AllocationSizes.size(); ++i)
121 if(AllocationSizes[i].BufferSizeMax > 0)
122 return true;
123 return false;
124 }
125
126 bool UsesImages() const
127 {
128 for(size_t i = 0; i < AllocationSizes.size(); ++i)
129 if(AllocationSizes[i].ImageSizeMax > 0)
130 return true;
131 return false;
132 }
133};
134
// Aggregated results of the custom-pool test, merged from per-thread
// PoolTestThreadResult accumulators.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // Allocations reported lost by VMA (lost-allocation feature).
    size_t LostAllocationCount, LostAllocationTotalSize;
    // Allocations that failed outright.
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
143
// Bytes-per-pixel factor used when estimating image memory footprint in
// the main test's byte budgets (a deliberate underestimate; real images
// use 4-byte RGBA).
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame counter — presumably advanced by the frame-based/lost-allocation
// tests; not used within this chunk. TODO confirm against the rest of the file.
static uint32_t g_FrameIndex = 0;
147
// A buffer handle paired with the VMA allocation backing it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
153
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200154static uint32_t GetAllocationStrategyCount()
155{
156 uint32_t strategyCount = 0;
157 switch(ConfigType)
158 {
159 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
160 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
161 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
162 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
163 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
164 default: assert(0);
165 }
166 return strategyCount;
167}
168
169static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
170{
171 switch(allocStrategy)
172 {
173 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
174 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
175 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
176 case 0: return "Default"; break;
177 default: assert(0); return "";
178 }
179}
180
Adam Sawickib8333fb2018-03-13 16:15:53 +0100181static void InitResult(Result& outResult)
182{
183 outResult.TotalTime = duration::zero();
184 outResult.AllocationTimeMin = duration::max();
185 outResult.AllocationTimeAvg = duration::zero();
186 outResult.AllocationTimeMax = duration::min();
187 outResult.DeallocationTimeMin = duration::max();
188 outResult.DeallocationTimeAvg = duration::zero();
189 outResult.DeallocationTimeMax = duration::min();
190 outResult.TotalMemoryAllocated = 0;
191 outResult.FreeRangeSizeAvg = 0;
192 outResult.FreeRangeSizeMax = 0;
193}
194
// RAII stopwatch: measures the time between construction and destruction
// and folds it into three caller-owned accumulators (min, sum, max) held
// by reference. `duration` and `time_point` are project-wide typedefs
// (from Common.h — presumably std::chrono-based; confirm there).
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        // Elapsed time since construction; update running sum/min/max.
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};
220
// Per-thread accumulators for the custom-pool test; merged into a
// PoolTestResult after all threads finish.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
229
230class AllocationTimeRegisterObj : public TimeRegisterObj
231{
232public:
233 AllocationTimeRegisterObj(Result& result) :
234 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
235 {
236 }
237};
238
239class DeallocationTimeRegisterObj : public TimeRegisterObj
240{
241public:
242 DeallocationTimeRegisterObj(Result& result) :
243 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
244 {
245 }
246};
247
248class PoolAllocationTimeRegisterObj : public TimeRegisterObj
249{
250public:
251 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
252 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
253 {
254 }
255};
256
257class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
258{
259public:
260 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
261 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
262 {
263 }
264};
265
// Formats the current local date/time into `out` using strftime's "%c"
// (locale-preferred) format. Windows/MSVC-specific: relies on the
// localtime_s(tm*, time_t*) signature and the _countof macro.
static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}
274
// Main stress test: spawns config.ThreadCount threads that allocate buffers
// and images through VMA according to the weighted distributions in `config`,
// then perform random additional alloc/free operations, and finally free
// everything in config.FreeOrder. Timings and memory statistics are
// accumulated into outResult. Returns the last VkResult produced by an
// allocation (VK_SUCCESS if none failed).
// Windows-only: uses CreateEvent/SetEvent/WaitForSingleObject/Sleep.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    // NOTE(review): res is captured by reference in Allocate and written from
    // multiple worker threads without synchronization — potential data race;
    // confirm whether only the "any failure" property matters here.
    VkResult res = VK_SUCCESS;

    // Total weight of the 4 VMA_MEMORY_USAGE_* buckets; used for weighted pick.
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    // Total weight of the allocation-size distribution.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // One created resource: exactly one of Buffer/Image is non-null.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads, guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent non-zero)
    // with a randomly picked VMA memory usage, records allocation timing, and
    // appends the result either to the shared list or to `allocations`.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer/image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted random pick of memory usage bucket 0..3.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                // Scoped timer: measures just the vmaCreateBuffer call.
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images use optimal tiling; host-visible ones must be linear.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Pick usage flags appropriate for the chosen memory-usage bucket.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                // Scoped timer: measures just the vmaCreateImage call.
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            // Randomly route some allocations to the shared cross-thread list.
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    // Picks the next random resource size from config.AllocationSizes:
    // exactly one of outBufSize / outImageSize is set.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        // Weighted random pick of a distribution entry.
        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                // Round down to a multiple of 16 bytes.
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event: signaled by the main thread once statistics have
    // been captured, releasing all workers into their deallocation phase.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    // Worker body: initial allocations up to its share of BeginBytesToAllocate,
    // then random alloc/free churn, then wait for the event and free everything.
    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Per-thread shares of the global budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                // Only allocate if it fits within this thread's byte budget.
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation, either from the shared list
                // (under the mutex) or from this thread's own list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Skip if freeing would underflow this thread's byte counter.
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Block until the main thread has captured memory statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): uses mainRand (shared, unsynchronized) rather
                // than threadRand — verify whether that is intentional.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    // Launch workers, each with a distinct derived seed.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Convert accumulated time sums into per-allocation averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
641
Adam Sawickie44c6262018-06-15 14:30:39 +0200642static void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100643{
644 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200645 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100646 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200647 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100648}
649
// A test resource (buffer or image), its VMA allocation, and the seed
// (m_StartValue) of the sequential uint32 pattern used to fill and later
// validate its contents.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;  // Non-null when this is a buffer.
    VkImage m_Image = VK_NULL_HANDLE;    // Non-null when this is an image.
    uint32_t m_StartValue = 0;           // First value of the fill pattern.
    // Only the member matching the resource kind is meaningful.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // Creates the buffer via VMA and remembers bufCreateInfo in m_BufferInfo.
    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    // Destroys whichever resource exists and frees the allocation.
    void Destroy();
};
667
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200668void AllocInfo::CreateBuffer(
669 const VkBufferCreateInfo& bufCreateInfo,
670 const VmaAllocationCreateInfo& allocCreateInfo)
671{
672 m_BufferInfo = bufCreateInfo;
673 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
674 TEST(res == VK_SUCCESS);
675}
676
677void AllocInfo::Destroy()
678{
679 if(m_Image)
680 {
681 vkDestroyImage(g_hDevice, m_Image, nullptr);
682 }
683 if(m_Buffer)
684 {
685 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
686 }
687 if(m_Allocation)
688 {
689 vmaFreeMemory(g_hAllocator, m_Allocation);
690 }
691}
692
// Pool of persistently-mapped, host-visible staging buffers reused across
// copy operations. Grows on demand up to MAX_TOTAL_SIZE; released buffers
// stay allocated and are handed out again by best-fit size.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks every buffer unused; memory remains allocated for reuse.
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    // One cached staging buffer with its persistent mapping and in-use flag.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
716
717StagingBufferCollection::~StagingBufferCollection()
718{
719 for(size_t i = m_Bufs.size(); i--; )
720 {
721 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
722 }
723}
724
// Hands out a staging buffer of at least `size` bytes together with its
// persistent mapping. Strategy: (1) reuse the smallest unused cached buffer
// that fits; (2) otherwise allocate a new one if the MAX_TOTAL_SIZE budget
// allows; (3) otherwise free all unused (too-small) buffers and retry once
// recursively. Returns false only when used buffers alone exceed the budget.
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // CPU-only + MAPPED: the buffer stays mapped for its whole lifetime.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Iterate backwards so erase() doesn't shift unvisited elements.
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Retry with the freed budget; recursion terminates because no
        // unused buffers remain on the second pass.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
806
807void StagingBufferCollection::ReleaseAllBuffers()
808{
809 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
810 {
811 m_Bufs[i].Used = false;
812 }
813}
814
// Fills each buffer in allocInfo[0..allocInfoCount) with its sequential
// uint32 pattern (starting at m_StartValue) by writing into mapped staging
// buffers and recording copy commands. When staging space runs out, the
// pending command buffer is submitted and the staging pool recycled.
// Images are not supported. Buffers' sizes must be multiples of 4.
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging pool exhausted: flush pending copies so the staging
                // buffers can be safely reused, then retry.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Flush any copies still pending in the open command buffer.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
878
879static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
880{
881 StagingBufferCollection stagingBufs;
882
883 bool cmdBufferStarted = false;
884 size_t validateAllocIndexOffset = 0;
885 std::vector<void*> validateStagingBuffers;
886 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
887 {
888 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
889 if(currAllocInfo.m_Buffer)
890 {
891 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
892
893 VkBuffer stagingBuf = VK_NULL_HANDLE;
894 void* stagingBufMappedPtr = nullptr;
895 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
896 {
897 TEST(cmdBufferStarted);
898 EndSingleTimeCommands();
899 cmdBufferStarted = false;
900
901 for(size_t validateIndex = 0;
902 validateIndex < validateStagingBuffers.size();
903 ++validateIndex)
904 {
905 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
906 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
907 TEST(validateSize % sizeof(uint32_t) == 0);
908 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
909 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
910 bool valid = true;
911 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
912 {
913 if(*stagingValPtr != val)
914 {
915 valid = false;
916 break;
917 }
918 ++stagingValPtr;
919 ++val;
920 }
921 TEST(valid);
922 }
923
924 stagingBufs.ReleaseAllBuffers();
925
926 validateAllocIndexOffset = allocInfoIndex;
927 validateStagingBuffers.clear();
928
929 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
930 TEST(ok);
931 }
932
933 // Issue copy command from staging buffer to destination buffer.
934 if(!cmdBufferStarted)
935 {
936 cmdBufferStarted = true;
937 BeginSingleTimeCommands();
938 }
939
940 VkBufferCopy copy = {};
941 copy.srcOffset = 0;
942 copy.dstOffset = 0;
943 copy.size = size;
944 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
945
946 // Sava mapped pointer for later validation.
947 validateStagingBuffers.push_back(stagingBufMappedPtr);
948 }
949 else
950 {
951 TEST(0 && "Images not currently supported.");
952 }
953 }
954
955 if(cmdBufferStarted)
956 {
957 EndSingleTimeCommands();
958
959 for(size_t validateIndex = 0;
960 validateIndex < validateStagingBuffers.size();
961 ++validateIndex)
962 {
963 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
964 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
965 TEST(validateSize % sizeof(uint32_t) == 0);
966 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
967 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
968 bool valid = true;
969 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
970 {
971 if(*stagingValPtr != val)
972 {
973 valid = false;
974 break;
975 }
976 ++stagingValPtr;
977 ++val;
978 }
979 TEST(valid);
980 }
981
982 stagingBufs.ReleaseAllBuffers();
983 }
984}
985
Adam Sawickib8333fb2018-03-13 16:15:53 +0100986static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
987{
988 outMemReq = {};
989 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
990 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
991}
992
// Creates a buffer from the given pool, optionally persistently mapped, and
// fills it with a sequential uint32 pattern starting at a random
// m_StartValue (used later to validate contents). bufCreateInfo.size must
// be a multiple of 4.
static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        // pMappedData must be non-null exactly when MAPPED_BIT was requested.
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            // Map temporarily just for the fill below.
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}
1029
// Creates a random test allocation using GetMemReq() (CPU_TO_GPU, so
// host-visible) and fills its memory with sequential uint32_t values starting
// at a random m_StartValue for later validation. Currently always creates a
// buffer; the image path below is kept but disabled.
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    // Buffer/image choice is hard-coded to buffer for now.
    const bool isBuffer = true;//(rand() & 0x1) != 0;
    // 1 in 16 allocations is "large".
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    // Map if not already persistently mapped, then write the test pattern
    // over the whole allocation size as reported by VMA.
    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}
1100
1101static void DestroyAllocation(const AllocInfo& allocation)
1102{
1103 if(allocation.m_Buffer)
1104 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1105 else
1106 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1107}
1108
1109static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1110{
1111 for(size_t i = allocations.size(); i--; )
1112 DestroyAllocation(allocations[i]);
1113 allocations.clear();
1114}
1115
1116static void ValidateAllocationData(const AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1122 if(allocInfo.pMappedData == nullptr)
1123 {
1124 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001126 }
1127
1128 uint32_t value = allocation.m_StartValue;
1129 bool ok = true;
1130 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001131 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001132 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1133 {
1134 if(data[i] != value++)
1135 {
1136 ok = false;
1137 break;
1138 }
1139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001140 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001141
1142 if(allocInfo.pMappedData == nullptr)
1143 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1144}
1145
// Destroys only the VkBuffer/VkImage of the given allocation and creates a
// fresh one bound to the allocation's current memory and offset. Used after
// defragmentation has moved the allocation to a different place in memory.
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        // Recreate with the same creation parameters as the original.
        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        // Bind to the memory + offset the allocation currently occupies.
        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}
1181
1182static void Defragment(AllocInfo* allocs, size_t allocCount,
1183 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1184 VmaDefragmentationStats* defragmentationStats = nullptr)
1185{
1186 std::vector<VmaAllocation> vmaAllocs(allocCount);
1187 for(size_t i = 0; i < allocCount; ++i)
1188 vmaAllocs[i] = allocs[i].m_Allocation;
1189
1190 std::vector<VkBool32> allocChanged(allocCount);
1191
1192 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1193 defragmentationInfo, defragmentationStats) );
1194
1195 for(size_t i = 0; i < allocCount; ++i)
1196 {
1197 if(allocChanged[i])
1198 {
1199 RecreateAllocationResource(allocs[i]);
1200 }
1201 }
1202}
1203
1204static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1205{
1206 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1207 ValidateAllocationData(allocInfo);
1208 });
1209}
1210
1211void TestDefragmentationSimple()
1212{
1213 wprintf(L"Test defragmentation simple\n");
1214
1215 RandomNumberGenerator rand(667);
1216
1217 const VkDeviceSize BUF_SIZE = 0x10000;
1218 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1219
1220 const VkDeviceSize MIN_BUF_SIZE = 32;
1221 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1222 auto RandomBufSize = [&]() -> VkDeviceSize {
1223 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1224 };
1225
1226 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1227 bufCreateInfo.size = BUF_SIZE;
1228 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1229
1230 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1231 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1232
1233 uint32_t memTypeIndex = UINT32_MAX;
1234 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1235
1236 VmaPoolCreateInfo poolCreateInfo = {};
1237 poolCreateInfo.blockSize = BLOCK_SIZE;
1238 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1239
1240 VmaPool pool;
1241 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1242
1243 std::vector<AllocInfo> allocations;
1244
1245 // persistentlyMappedOption = 0 - not persistently mapped.
1246 // persistentlyMappedOption = 1 - persistently mapped.
1247 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1248 {
1249 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1250 const bool persistentlyMapped = persistentlyMappedOption != 0;
1251
1252 // # Test 1
1253 // Buffers of fixed size.
1254 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1255 // Expected result: at least 1 block freed.
1256 {
1257 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1258 {
1259 AllocInfo allocInfo;
1260 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1261 allocations.push_back(allocInfo);
1262 }
1263
1264 for(size_t i = 1; i < allocations.size(); ++i)
1265 {
1266 DestroyAllocation(allocations[i]);
1267 allocations.erase(allocations.begin() + i);
1268 }
1269
1270 VmaDefragmentationStats defragStats;
1271 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1273 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001274
1275 ValidateAllocationsData(allocations.data(), allocations.size());
1276
1277 DestroyAllAllocations(allocations);
1278 }
1279
1280 // # Test 2
1281 // Buffers of fixed size.
1282 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1283 // Expected result: Each of 4 interations makes some progress.
1284 {
1285 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1286 {
1287 AllocInfo allocInfo;
1288 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1289 allocations.push_back(allocInfo);
1290 }
1291
1292 for(size_t i = 1; i < allocations.size(); ++i)
1293 {
1294 DestroyAllocation(allocations[i]);
1295 allocations.erase(allocations.begin() + i);
1296 }
1297
1298 VmaDefragmentationInfo defragInfo = {};
1299 defragInfo.maxAllocationsToMove = 1;
1300 defragInfo.maxBytesToMove = BUF_SIZE;
1301
1302 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1303 {
1304 VmaDefragmentationStats defragStats;
1305 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001306 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001307 }
1308
1309 ValidateAllocationsData(allocations.data(), allocations.size());
1310
1311 DestroyAllAllocations(allocations);
1312 }
1313
1314 // # Test 3
1315 // Buffers of variable size.
1316 // Create a number of buffers. Remove some percent of them.
1317 // Defragment while having some percent of them unmovable.
1318 // Expected result: Just simple validation.
1319 {
1320 for(size_t i = 0; i < 100; ++i)
1321 {
1322 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1323 localBufCreateInfo.size = RandomBufSize();
1324
1325 AllocInfo allocInfo;
1326 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1327 allocations.push_back(allocInfo);
1328 }
1329
1330 const uint32_t percentToDelete = 60;
1331 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1332 for(size_t i = 0; i < numberToDelete; ++i)
1333 {
1334 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1335 DestroyAllocation(allocations[indexToDelete]);
1336 allocations.erase(allocations.begin() + indexToDelete);
1337 }
1338
1339 // Non-movable allocations will be at the beginning of allocations array.
1340 const uint32_t percentNonMovable = 20;
1341 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1342 for(size_t i = 0; i < numberNonMovable; ++i)
1343 {
1344 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1345 if(indexNonMovable != i)
1346 std::swap(allocations[i], allocations[indexNonMovable]);
1347 }
1348
1349 VmaDefragmentationStats defragStats;
1350 Defragment(
1351 allocations.data() + numberNonMovable,
1352 allocations.size() - numberNonMovable,
1353 nullptr, &defragStats);
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359 }
1360
1361 vmaDestroyPool(g_hAllocator, pool);
1362}
1363
// Heavier defragmentation test on default pools: creates 400 random
// allocations via CreateAllocation(), frees 80% of them at random, runs one
// unlimited vmaDefragment() pass, recreates moved resources, and validates
// the data pattern of every remaining allocation.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // Optionally exclude a percentage of allocations from defragmentation
        // (currently 0, so the loop below is a no-op).
        // NOTE(review): if nonMovablePercent were raised above 0, indices in
        // vmaAllocations would no longer correspond to indices in allocations,
        // so RecreateAllocationResource(allocations[i]) below would recreate
        // the wrong element's resource - verify before enabling.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits - move as much as the algorithm wants.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Recreate buffers/images of allocations that were moved.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1447
// Tests GPU-side defragmentation (vmaDefragmentationBegin/End with a command
// buffer): fills ~3 x 256 MB of GPU_ONLY buffers, frees 80% of them at
// random, uploads a known data pattern, defragments on the GPU, then
// validates the data and expects blocks to have been freed.
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    // Rebinding resources at moved memory triggers aliasing warnings - mute them.
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSize = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSize);
    const size_t percentToLeave = 20;
    RandomNumberGenerator rand = { 234522 };

    // TRANSFER_SRC|DST are needed so defragmentation can copy the buffers.
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = bufSize;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
        VK_BUFFER_USAGE_TRANSFER_DST_BIT |
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = "TestDefragmentationGpu";

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_A_before.json");

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs(allocCount);
        std::vector<VkBool32> allocationChanged(allocCount);
        for(size_t i = 0; i < allocCount; ++i)
        {
            allocationPtrs[i] = allocations[i].m_Allocation;
        }
        memset(allocationChanged.data(), 0, allocCount * sizeof(VkBool32));

        BeginSingleTimeCommands();

        // GPU-only limits: record move commands into the temporary command
        // buffer; no CPU-side limits are set.
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.allocationCount = (uint32_t)allocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        // Submit the recorded copy commands and wait, then finish defragmentation.
        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        // Rebind buffers that were moved to their new memory locations.
        for(size_t i = 0; i < allocCount; ++i)
        {
            if(allocationChanged[i])
            {
                RecreateAllocationResource(allocations[i]);
            }
        }

        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        TEST(stats.allocationsLost == 0);
    }

    ValidateGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_B_after.json");

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}
1554
Adam Sawickib8333fb2018-03-13 16:15:53 +01001555static void TestUserData()
1556{
1557 VkResult res;
1558
1559 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1560 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1561 bufCreateInfo.size = 0x10000;
1562
1563 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1564 {
1565 // Opaque pointer
1566 {
1567
1568 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1569 void* pointerToSomething = &res;
1570
1571 VmaAllocationCreateInfo allocCreateInfo = {};
1572 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1573 allocCreateInfo.pUserData = numberAsPointer;
1574 if(testIndex == 1)
1575 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1576
1577 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1578 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001579 TEST(res == VK_SUCCESS);
1580 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001581
1582 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001583 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001584
1585 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1586 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001587 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001588
1589 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1590 }
1591
1592 // String
1593 {
1594 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1595 const char* name2 = "2";
1596 const size_t name1Len = strlen(name1);
1597
1598 char* name1Buf = new char[name1Len + 1];
1599 strcpy_s(name1Buf, name1Len + 1, name1);
1600
1601 VmaAllocationCreateInfo allocCreateInfo = {};
1602 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1603 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1604 allocCreateInfo.pUserData = name1Buf;
1605 if(testIndex == 1)
1606 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1607
1608 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1609 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001610 TEST(res == VK_SUCCESS);
1611 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1612 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001613
1614 delete[] name1Buf;
1615
1616 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001617 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001618
1619 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1620 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001621 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001622
1623 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1624 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001625 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001626
1627 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1628 }
1629 }
1630}
1631
// Exercises the different ways of specifying memory requirements in
// VmaAllocationCreateInfo: none, usage, requiredFlags/preferredFlags,
// and memoryTypeBits.
static void TestMemoryRequirements()
{
    VkResult res;
    VkBuffer buf;
    VmaAllocation alloc;
    VmaAllocationInfo allocInfo;

    const VkPhysicalDeviceMemoryProperties* memProps;
    vmaGetMemoryProperties(g_hAllocator, &memProps);

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    bufInfo.size = 128;

    VmaAllocationCreateInfo allocCreateInfo = {};

    // No requirements.
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // Usage.
    // CPU_ONLY must land in a HOST_VISIBLE memory type.
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.requiredFlags = 0;
    allocCreateInfo.preferredFlags = 0;
    allocCreateInfo.memoryTypeBits = UINT32_MAX;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // Required flags, preferred flags.
    // Only the required flags are asserted below; preferred ones are a hint.
    allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
    allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    allocCreateInfo.memoryTypeBits = 0;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // memoryTypeBits.
    // Restricting the mask to a single bit must yield exactly that type.
    const uint32_t memType = allocInfo.memoryType;
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.requiredFlags = 0;
    allocCreateInfo.preferredFlags = 0;
    allocCreateInfo.memoryTypeBits = 1u << memType;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(allocInfo.memoryType == memType);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

}
1689
// Basic smoke tests: memory requirements, lost allocations, the MAPPED flag
// on GPU_ONLY (not necessarily HOST_VISIBLE) memory, and pUserData handling.
static void TestBasics()
{
    VkResult res;

    TestMemoryRequirements();

    // Lost allocation
    {
        VmaAllocation alloc = VK_NULL_HANDLE;
        vmaCreateLostAllocation(g_hAllocator, &alloc);
        TEST(alloc != VK_NULL_HANDLE);

        // A lost allocation is backed by no memory and reports zero size.
        VmaAllocationInfo allocInfo;
        vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
        TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
        TEST(allocInfo.size == 0);

        vmaFreeMemory(g_hAllocator, alloc);
    }

    // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
    {
        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
        bufCreateInfo.size = 128;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        // Creation must succeed even if mapping is not possible on this memory.
        VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Same with OWN_MEMORY.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        vmaDestroyBuffer(g_hAllocator, buf, alloc);
    }

    TestUserData();
}
1737
// Tests VmaAllocatorCreateInfo::pHeapSizeLimit: creates a separate allocator
// limited to 1 GB per heap, fills one heap exactly up to the limit, and
// checks that one more (even tiny) allocation fails with
// VK_ERROR_OUT_OF_DEVICE_MEMORY.
void TestHeapSizeLimit()
{
    const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
    const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB

    // Apply the same limit to every heap.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = HEAP_SIZE_LIMIT;
    }

    // Use a dedicated allocator so the limit does not affect other tests.
    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    struct Item
    {
        VkBuffer hBuf;
        VmaAllocation hAlloc;
    };
    std::vector<Item> items;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
    VmaAllocationInfo ownAllocInfo;
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        for(size_t i = 0; i < 2; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // Create pool to make sure allocations must be out of this memory type.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
    poolCreateInfo.blockSize = BLOCK_SIZE;

    VmaPool hPool;
    res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
    TEST(res == VK_SUCCESS);

    // 2. Allocate normal buffers from all the remaining memory.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        // Two half-block buffers per remaining block fill the heap to the limit.
        const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
        for(size_t i = 0; i < bufCount; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // 3. Allocation of one more (even small) buffer should fail.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = 128;

        VkBuffer hBuf;
        VmaAllocation hAlloc;
        res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
        TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }

    // Destroy everything.
    for(size_t i = items.size(); i--; )
    {
        vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
    }

    vmaDestroyPool(hAllocator, hPool);

    vmaDestroyAllocator(hAllocator);
}
1835
#if VMA_DEBUG_MARGIN
// Checks that VMA_DEBUG_MARGIN bytes are reserved before each allocation and
// between neighboring allocations within a block, and that
// vmaCheckCorruption() reports success for untouched margins.
static void TestDebugMargin()
{
    // Defensive runtime check; the #if above should already exclude this case.
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    // Fixed: loop bound was a hard-coded 10 instead of BUF_COUNT, so the loop
    // and the arrays could silently get out of sync if the count changed.
    for(size_t i = 0; i < BUF_COUNT; ++i)
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill with data.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Check if their offsets preserve margin between them.
    // Sort by (deviceMemory, offset) so neighbors in the array are neighbors in memory.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    // Margins are filled with a magic pattern; this must not report corruption.
    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001901
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001902static void TestLinearAllocator()
1903{
1904 wprintf(L"Test linear allocator\n");
1905
1906 RandomNumberGenerator rand{645332};
1907
1908 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1909 sampleBufCreateInfo.size = 1024; // Whatever.
1910 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1911
1912 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
1913 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1914
1915 VmaPoolCreateInfo poolCreateInfo = {};
1916 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001917 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001918
Adam Sawickiee082772018-06-20 17:45:49 +02001919 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001920 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
1921 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
1922
1923 VmaPool pool = nullptr;
1924 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001925 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001926
1927 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
1928
1929 VmaAllocationCreateInfo allocCreateInfo = {};
1930 allocCreateInfo.pool = pool;
1931
1932 constexpr size_t maxBufCount = 100;
1933 std::vector<BufferInfo> bufInfo;
1934
1935 constexpr VkDeviceSize bufSizeMin = 16;
1936 constexpr VkDeviceSize bufSizeMax = 1024;
1937 VmaAllocationInfo allocInfo;
1938 VkDeviceSize prevOffset = 0;
1939
1940 // Test one-time free.
1941 for(size_t i = 0; i < 2; ++i)
1942 {
1943 // Allocate number of buffers of varying size that surely fit into this block.
1944 VkDeviceSize bufSumSize = 0;
1945 for(size_t i = 0; i < maxBufCount; ++i)
1946 {
1947 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1948 BufferInfo newBufInfo;
1949 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1950 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001951 TEST(res == VK_SUCCESS);
1952 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001953 bufInfo.push_back(newBufInfo);
1954 prevOffset = allocInfo.offset;
1955 bufSumSize += bufCreateInfo.size;
1956 }
1957
1958 // Validate pool stats.
1959 VmaPoolStats stats;
1960 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001961 TEST(stats.size == poolCreateInfo.blockSize);
1962 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
1963 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001964
1965 // Destroy the buffers in random order.
1966 while(!bufInfo.empty())
1967 {
1968 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
1969 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
1970 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1971 bufInfo.erase(bufInfo.begin() + indexToDestroy);
1972 }
1973 }
1974
1975 // Test stack.
1976 {
1977 // Allocate number of buffers of varying size that surely fit into this block.
1978 for(size_t i = 0; i < maxBufCount; ++i)
1979 {
1980 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1981 BufferInfo newBufInfo;
1982 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1983 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001984 TEST(res == VK_SUCCESS);
1985 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001986 bufInfo.push_back(newBufInfo);
1987 prevOffset = allocInfo.offset;
1988 }
1989
1990 // Destroy few buffers from top of the stack.
1991 for(size_t i = 0; i < maxBufCount / 5; ++i)
1992 {
1993 const BufferInfo& currBufInfo = bufInfo.back();
1994 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1995 bufInfo.pop_back();
1996 }
1997
1998 // Create some more
1999 for(size_t i = 0; i < maxBufCount / 5; ++i)
2000 {
2001 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2002 BufferInfo newBufInfo;
2003 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2004 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002005 TEST(res == VK_SUCCESS);
2006 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002007 bufInfo.push_back(newBufInfo);
2008 prevOffset = allocInfo.offset;
2009 }
2010
2011 // Destroy the buffers in reverse order.
2012 while(!bufInfo.empty())
2013 {
2014 const BufferInfo& currBufInfo = bufInfo.back();
2015 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2016 bufInfo.pop_back();
2017 }
2018 }
2019
Adam Sawickiee082772018-06-20 17:45:49 +02002020 // Test ring buffer.
2021 {
2022 // Allocate number of buffers that surely fit into this block.
2023 bufCreateInfo.size = bufSizeMax;
2024 for(size_t i = 0; i < maxBufCount; ++i)
2025 {
2026 BufferInfo newBufInfo;
2027 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2028 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002029 TEST(res == VK_SUCCESS);
2030 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002031 bufInfo.push_back(newBufInfo);
2032 prevOffset = allocInfo.offset;
2033 }
2034
2035 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2036 const size_t buffersPerIter = maxBufCount / 10 - 1;
2037 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2038 for(size_t iter = 0; iter < iterCount; ++iter)
2039 {
2040 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2041 {
2042 const BufferInfo& currBufInfo = bufInfo.front();
2043 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2044 bufInfo.erase(bufInfo.begin());
2045 }
2046 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2047 {
2048 BufferInfo newBufInfo;
2049 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2050 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002051 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002052 bufInfo.push_back(newBufInfo);
2053 }
2054 }
2055
2056 // Allocate buffers until we reach out-of-memory.
2057 uint32_t debugIndex = 0;
2058 while(res == VK_SUCCESS)
2059 {
2060 BufferInfo newBufInfo;
2061 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2062 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2063 if(res == VK_SUCCESS)
2064 {
2065 bufInfo.push_back(newBufInfo);
2066 }
2067 else
2068 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002069 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002070 }
2071 ++debugIndex;
2072 }
2073
2074 // Destroy the buffers in random order.
2075 while(!bufInfo.empty())
2076 {
2077 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2078 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2079 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2080 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2081 }
2082 }
2083
Adam Sawicki680b2252018-08-22 14:47:32 +02002084 // Test double stack.
2085 {
2086 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
2087 VkDeviceSize prevOffsetLower = 0;
2088 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2089 for(size_t i = 0; i < maxBufCount; ++i)
2090 {
2091 const bool upperAddress = (i % 2) != 0;
2092 if(upperAddress)
2093 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2094 else
2095 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2096 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2097 BufferInfo newBufInfo;
2098 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2099 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002100 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002101 if(upperAddress)
2102 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002103 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002104 prevOffsetUpper = allocInfo.offset;
2105 }
2106 else
2107 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002108 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002109 prevOffsetLower = allocInfo.offset;
2110 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002111 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002112 bufInfo.push_back(newBufInfo);
2113 }
2114
2115 // Destroy few buffers from top of the stack.
2116 for(size_t i = 0; i < maxBufCount / 5; ++i)
2117 {
2118 const BufferInfo& currBufInfo = bufInfo.back();
2119 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2120 bufInfo.pop_back();
2121 }
2122
2123 // Create some more
2124 for(size_t i = 0; i < maxBufCount / 5; ++i)
2125 {
2126 const bool upperAddress = (i % 2) != 0;
2127 if(upperAddress)
2128 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2129 else
2130 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2131 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2132 BufferInfo newBufInfo;
2133 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2134 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002135 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002136 bufInfo.push_back(newBufInfo);
2137 }
2138
2139 // Destroy the buffers in reverse order.
2140 while(!bufInfo.empty())
2141 {
2142 const BufferInfo& currBufInfo = bufInfo.back();
2143 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2144 bufInfo.pop_back();
2145 }
2146
2147 // Create buffers on both sides until we reach out of memory.
2148 prevOffsetLower = 0;
2149 prevOffsetUpper = poolCreateInfo.blockSize;
2150 res = VK_SUCCESS;
2151 for(size_t i = 0; res == VK_SUCCESS; ++i)
2152 {
2153 const bool upperAddress = (i % 2) != 0;
2154 if(upperAddress)
2155 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2156 else
2157 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2158 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2159 BufferInfo newBufInfo;
2160 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2161 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2162 if(res == VK_SUCCESS)
2163 {
2164 if(upperAddress)
2165 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002166 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002167 prevOffsetUpper = allocInfo.offset;
2168 }
2169 else
2170 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002171 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002172 prevOffsetLower = allocInfo.offset;
2173 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002174 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002175 bufInfo.push_back(newBufInfo);
2176 }
2177 }
2178
2179 // Destroy the buffers in random order.
2180 while(!bufInfo.empty())
2181 {
2182 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2183 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2184 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2185 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2186 }
2187
2188 // Create buffers on upper side only, constant size, until we reach out of memory.
2189 prevOffsetUpper = poolCreateInfo.blockSize;
2190 res = VK_SUCCESS;
2191 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2192 bufCreateInfo.size = bufSizeMax;
2193 for(size_t i = 0; res == VK_SUCCESS; ++i)
2194 {
2195 BufferInfo newBufInfo;
2196 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2197 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2198 if(res == VK_SUCCESS)
2199 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002200 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002201 prevOffsetUpper = allocInfo.offset;
2202 bufInfo.push_back(newBufInfo);
2203 }
2204 }
2205
2206 // Destroy the buffers in reverse order.
2207 while(!bufInfo.empty())
2208 {
2209 const BufferInfo& currBufInfo = bufInfo.back();
2210 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2211 bufInfo.pop_back();
2212 }
2213 }
2214
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002215 // Test ring buffer with lost allocations.
2216 {
2217 // Allocate number of buffers until pool is full.
2218 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2219 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2220 res = VK_SUCCESS;
2221 for(size_t i = 0; res == VK_SUCCESS; ++i)
2222 {
2223 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2224
2225 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2226
2227 BufferInfo newBufInfo;
2228 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2229 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2230 if(res == VK_SUCCESS)
2231 bufInfo.push_back(newBufInfo);
2232 }
2233
2234 // Free first half of it.
2235 {
2236 const size_t buffersToDelete = bufInfo.size() / 2;
2237 for(size_t i = 0; i < buffersToDelete; ++i)
2238 {
2239 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2240 }
2241 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2242 }
2243
2244 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002245 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002246 res = VK_SUCCESS;
2247 for(size_t i = 0; res == VK_SUCCESS; ++i)
2248 {
2249 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2250
2251 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2252
2253 BufferInfo newBufInfo;
2254 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2255 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2256 if(res == VK_SUCCESS)
2257 bufInfo.push_back(newBufInfo);
2258 }
2259
2260 VkDeviceSize firstNewOffset;
2261 {
2262 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2263
2264 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2265 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2266 bufCreateInfo.size = bufSizeMax;
2267
2268 BufferInfo newBufInfo;
2269 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2270 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002271 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002272 bufInfo.push_back(newBufInfo);
2273 firstNewOffset = allocInfo.offset;
2274
2275 // Make sure at least one buffer from the beginning became lost.
2276 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002277 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002278 }
2279
2280 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2281 size_t newCount = 1;
2282 for(;;)
2283 {
2284 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2285
2286 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2287
2288 BufferInfo newBufInfo;
2289 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2290 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002291 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002292 bufInfo.push_back(newBufInfo);
2293 ++newCount;
2294 if(allocInfo.offset < firstNewOffset)
2295 break;
2296 }
2297
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002298 // Delete buffers that are lost.
2299 for(size_t i = bufInfo.size(); i--; )
2300 {
2301 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2302 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2303 {
2304 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2305 bufInfo.erase(bufInfo.begin() + i);
2306 }
2307 }
2308
2309 // Test vmaMakePoolAllocationsLost
2310 {
2311 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2312
2313 size_t lostAllocCount = SIZE_MAX;
2314 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002315 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002316
2317 size_t realLostAllocCount = 0;
2318 for(size_t i = 0; i < bufInfo.size(); ++i)
2319 {
2320 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2321 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2322 ++realLostAllocCount;
2323 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002324 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002325 }
2326
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002327 // Destroy all the buffers in forward order.
2328 for(size_t i = 0; i < bufInfo.size(); ++i)
2329 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2330 bufInfo.clear();
2331 }
2332
Adam Sawicki70a683e2018-08-24 15:36:32 +02002333 vmaDestroyPool(g_hAllocator, pool);
2334}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002335
Adam Sawicki70a683e2018-08-24 15:36:32 +02002336static void TestLinearAllocatorMultiBlock()
2337{
2338 wprintf(L"Test linear allocator multi block\n");
2339
2340 RandomNumberGenerator rand{345673};
2341
2342 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2343 sampleBufCreateInfo.size = 1024 * 1024;
2344 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2345
2346 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2347 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2348
2349 VmaPoolCreateInfo poolCreateInfo = {};
2350 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2351 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002353
2354 VmaPool pool = nullptr;
2355 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002356 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002357
2358 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2359
2360 VmaAllocationCreateInfo allocCreateInfo = {};
2361 allocCreateInfo.pool = pool;
2362
2363 std::vector<BufferInfo> bufInfo;
2364 VmaAllocationInfo allocInfo;
2365
2366 // Test one-time free.
2367 {
2368 // Allocate buffers until we move to a second block.
2369 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2370 for(uint32_t i = 0; ; ++i)
2371 {
2372 BufferInfo newBufInfo;
2373 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2374 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002375 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002376 bufInfo.push_back(newBufInfo);
2377 if(lastMem && allocInfo.deviceMemory != lastMem)
2378 {
2379 break;
2380 }
2381 lastMem = allocInfo.deviceMemory;
2382 }
2383
Adam Sawickib8d34d52018-10-03 17:41:20 +02002384 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002385
2386 // Make sure that pool has now two blocks.
2387 VmaPoolStats poolStats = {};
2388 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002389 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002390
2391 // Destroy all the buffers in random order.
2392 while(!bufInfo.empty())
2393 {
2394 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2395 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2396 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2397 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2398 }
2399
2400 // Make sure that pool has now at most one block.
2401 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002402 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002403 }
2404
2405 // Test stack.
2406 {
2407 // Allocate buffers until we move to a second block.
2408 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2409 for(uint32_t i = 0; ; ++i)
2410 {
2411 BufferInfo newBufInfo;
2412 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2413 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002414 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002415 bufInfo.push_back(newBufInfo);
2416 if(lastMem && allocInfo.deviceMemory != lastMem)
2417 {
2418 break;
2419 }
2420 lastMem = allocInfo.deviceMemory;
2421 }
2422
Adam Sawickib8d34d52018-10-03 17:41:20 +02002423 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002424
2425 // Add few more buffers.
2426 for(uint32_t i = 0; i < 5; ++i)
2427 {
2428 BufferInfo newBufInfo;
2429 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2430 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002431 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002432 bufInfo.push_back(newBufInfo);
2433 }
2434
2435 // Make sure that pool has now two blocks.
2436 VmaPoolStats poolStats = {};
2437 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002438 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002439
2440 // Delete half of buffers, LIFO.
2441 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2442 {
2443 const BufferInfo& currBufInfo = bufInfo.back();
2444 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2445 bufInfo.pop_back();
2446 }
2447
2448 // Add one more buffer.
2449 BufferInfo newBufInfo;
2450 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2451 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002452 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002453 bufInfo.push_back(newBufInfo);
2454
2455 // Make sure that pool has now one block.
2456 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002457 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002458
2459 // Delete all the remaining buffers, LIFO.
2460 while(!bufInfo.empty())
2461 {
2462 const BufferInfo& currBufInfo = bufInfo.back();
2463 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2464 bufInfo.pop_back();
2465 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002466 }
2467
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002468 vmaDestroyPool(g_hAllocator, pool);
2469}
2470
// Interactive test of a linear-algorithm pool: performs a fixed, documented
// sequence of double-stack allocations and is meant to be inspected under a
// debugger (see the "PUT BREAKPOINT HERE" marker below). The exact statement
// order matters - the expected stats in the comment block depend on it.
static void ManuallyTestLinearAllocator()
{
    // Snapshot of global stats before the test, to diff against at the breakpoint.
    VmaStats origStats;
    vmaCalculateStats(g_hAllocator, &origStats);

    wprintf(L"Manually test linear allocator\n");

    RandomNumberGenerator rand{645332};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single small block so all allocations below land in one 10 KB block.
    poolCreateInfo.blockSize = 10 * 1024;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;
    BufferInfo newBufInfo;

    // Test double stack.
    {
        /*
        Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
        Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B

        Totally:
        1 block allocated
        10240 Vulkan bytes
        6 new allocations
        2256 bytes in allocations
        */

        // Lower stack: three buffers allocated bottom-up.
        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Upper stack: remaining buffers allocated top-down from block end.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;

        bufCreateInfo.size = 128;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 16;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Gather the data to inspect: global stats, pool stats, JSON dump.
        VmaStats currStats;
        vmaCalculateStats(g_hAllocator, &currStats);
        VmaPoolStats poolStats;
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);

        char* statsStr = nullptr;
        vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);

        // PUT BREAKPOINT HERE TO CHECK.
        // Inspect: currStats versus origStats, poolStats, statsStr.
        // `I` exists only as a statement to anchor the breakpoint on.
        int I = 0;

        vmaFreeStatsString(g_hAllocator, statsStr);

        // Destroy the buffers in reverse order.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
2584
// Benchmarks one (algorithm, emptiness, strategy, free order) combination on a
// dedicated single-block custom pool:
// - algorithm: 0 (default), VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT or
//   VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT, OR-ed into VmaPoolCreateInfo::flags.
// - empty: when false, the pool is pre-populated to ~1/3 of its size and half
//   of those allocations are freed at random positions, to simulate fragmentation.
// - allocStrategy: VMA_ALLOCATION_CREATE_STRATEGY_* bits for each allocation.
// - freeOrder: order in which each iteration's allocations are freed.
// Total allocation/free wall-clock time over all iterations is printed to
// stdout and, when `file` is not null, appended to it as one CSV row.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223}; // Fixed seed so runs are reproducible/comparable.

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block large enough for maxBufCapacity buffers of max size.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // memReq.size is overridden per allocation; alignment/memoryTypeBits
            // stay as queried from the sample buffer.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations - only the vmaAllocateMemory loop is inside the timed region.
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reorder the vector first, outside the timed region.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf("    Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
2734
Adam Sawicki80927152018-09-07 17:27:23 +02002735static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002736{
Adam Sawicki80927152018-09-07 17:27:23 +02002737 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002738
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002739 if(file)
2740 {
2741 fprintf(file,
2742 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002743 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002744 "Allocation time (s),Deallocation time (s)\n");
2745 }
2746
Adam Sawicki0a607132018-08-24 11:18:41 +02002747 uint32_t freeOrderCount = 1;
2748 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2749 freeOrderCount = 3;
2750 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2751 freeOrderCount = 2;
2752
2753 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002754 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002755
2756 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2757 {
2758 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2759 switch(freeOrderIndex)
2760 {
2761 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2762 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2763 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2764 default: assert(0);
2765 }
2766
2767 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2768 {
Adam Sawicki80927152018-09-07 17:27:23 +02002769 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002770 {
Adam Sawicki80927152018-09-07 17:27:23 +02002771 uint32_t algorithm = 0;
2772 switch(algorithmIndex)
2773 {
2774 case 0:
2775 break;
2776 case 1:
2777 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2778 break;
2779 case 2:
2780 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2781 break;
2782 default:
2783 assert(0);
2784 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002785
Adam Sawicki80927152018-09-07 17:27:23 +02002786 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002787 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2788 {
2789 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002790 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002791 {
2792 switch(allocStrategyIndex)
2793 {
2794 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2795 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2796 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2797 default: assert(0);
2798 }
2799 }
2800
Adam Sawicki80927152018-09-07 17:27:23 +02002801 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002802 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002803 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002804 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002805 strategy,
2806 freeOrder); // freeOrder
2807 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002808 }
2809 }
2810 }
2811}
2812
Adam Sawickib8333fb2018-03-13 16:15:53 +01002813static void TestPool_SameSize()
2814{
2815 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2816 const size_t BUF_COUNT = 100;
2817 VkResult res;
2818
2819 RandomNumberGenerator rand{123};
2820
2821 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2822 bufferInfo.size = BUF_SIZE;
2823 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2824
2825 uint32_t memoryTypeBits = UINT32_MAX;
2826 {
2827 VkBuffer dummyBuffer;
2828 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002829 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002830
2831 VkMemoryRequirements memReq;
2832 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2833 memoryTypeBits = memReq.memoryTypeBits;
2834
2835 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2836 }
2837
2838 VmaAllocationCreateInfo poolAllocInfo = {};
2839 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2840 uint32_t memTypeIndex;
2841 res = vmaFindMemoryTypeIndex(
2842 g_hAllocator,
2843 memoryTypeBits,
2844 &poolAllocInfo,
2845 &memTypeIndex);
2846
2847 VmaPoolCreateInfo poolCreateInfo = {};
2848 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2849 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2850 poolCreateInfo.minBlockCount = 1;
2851 poolCreateInfo.maxBlockCount = 4;
2852 poolCreateInfo.frameInUseCount = 0;
2853
2854 VmaPool pool;
2855 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002856 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002857
2858 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2859
2860 VmaAllocationCreateInfo allocInfo = {};
2861 allocInfo.pool = pool;
2862 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2863 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2864
2865 struct BufItem
2866 {
2867 VkBuffer Buf;
2868 VmaAllocation Alloc;
2869 };
2870 std::vector<BufItem> items;
2871
2872 // Fill entire pool.
2873 for(size_t i = 0; i < BUF_COUNT; ++i)
2874 {
2875 BufItem item;
2876 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002877 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002878 items.push_back(item);
2879 }
2880
2881 // Make sure that another allocation would fail.
2882 {
2883 BufItem item;
2884 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002885 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002886 }
2887
2888 // Validate that no buffer is lost. Also check that they are not mapped.
2889 for(size_t i = 0; i < items.size(); ++i)
2890 {
2891 VmaAllocationInfo allocInfo;
2892 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002893 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2894 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002895 }
2896
2897 // Free some percent of random items.
2898 {
2899 const size_t PERCENT_TO_FREE = 10;
2900 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2901 for(size_t i = 0; i < itemsToFree; ++i)
2902 {
2903 size_t index = (size_t)rand.Generate() % items.size();
2904 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2905 items.erase(items.begin() + index);
2906 }
2907 }
2908
2909 // Randomly allocate and free items.
2910 {
2911 const size_t OPERATION_COUNT = BUF_COUNT;
2912 for(size_t i = 0; i < OPERATION_COUNT; ++i)
2913 {
2914 bool allocate = rand.Generate() % 2 != 0;
2915 if(allocate)
2916 {
2917 if(items.size() < BUF_COUNT)
2918 {
2919 BufItem item;
2920 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002921 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002922 items.push_back(item);
2923 }
2924 }
2925 else // Free
2926 {
2927 if(!items.empty())
2928 {
2929 size_t index = (size_t)rand.Generate() % items.size();
2930 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2931 items.erase(items.begin() + index);
2932 }
2933 }
2934 }
2935 }
2936
2937 // Allocate up to maximum.
2938 while(items.size() < BUF_COUNT)
2939 {
2940 BufItem item;
2941 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002942 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002943 items.push_back(item);
2944 }
2945
2946 // Validate that no buffer is lost.
2947 for(size_t i = 0; i < items.size(); ++i)
2948 {
2949 VmaAllocationInfo allocInfo;
2950 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002951 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002952 }
2953
2954 // Next frame.
2955 vmaSetCurrentFrameIndex(g_hAllocator, 2);
2956
2957 // Allocate another BUF_COUNT buffers.
2958 for(size_t i = 0; i < BUF_COUNT; ++i)
2959 {
2960 BufItem item;
2961 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002962 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002963 items.push_back(item);
2964 }
2965
2966 // Make sure the first BUF_COUNT is lost. Delete them.
2967 for(size_t i = 0; i < BUF_COUNT; ++i)
2968 {
2969 VmaAllocationInfo allocInfo;
2970 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002971 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002972 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2973 }
2974 items.erase(items.begin(), items.begin() + BUF_COUNT);
2975
2976 // Validate that no buffer is lost.
2977 for(size_t i = 0; i < items.size(); ++i)
2978 {
2979 VmaAllocationInfo allocInfo;
2980 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002981 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002982 }
2983
2984 // Free one item.
2985 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
2986 items.pop_back();
2987
2988 // Validate statistics.
2989 {
2990 VmaPoolStats poolStats = {};
2991 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002992 TEST(poolStats.allocationCount == items.size());
2993 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
2994 TEST(poolStats.unusedRangeCount == 1);
2995 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
2996 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002997 }
2998
2999 // Free all remaining items.
3000 for(size_t i = items.size(); i--; )
3001 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3002 items.clear();
3003
3004 // Allocate maximum items again.
3005 for(size_t i = 0; i < BUF_COUNT; ++i)
3006 {
3007 BufItem item;
3008 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003009 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003010 items.push_back(item);
3011 }
3012
3013 // Delete every other item.
3014 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3015 {
3016 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3017 items.erase(items.begin() + i);
3018 }
3019
3020 // Defragment!
3021 {
3022 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3023 for(size_t i = 0; i < items.size(); ++i)
3024 allocationsToDefragment[i] = items[i].Alloc;
3025
3026 VmaDefragmentationStats defragmentationStats;
3027 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003028 TEST(res == VK_SUCCESS);
3029 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003030 }
3031
3032 // Free all remaining items.
3033 for(size_t i = items.size(); i--; )
3034 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3035 items.clear();
3036
3037 ////////////////////////////////////////////////////////////////////////////////
3038 // Test for vmaMakePoolAllocationsLost
3039
3040 // Allocate 4 buffers on frame 10.
3041 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3042 for(size_t i = 0; i < 4; ++i)
3043 {
3044 BufItem item;
3045 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003046 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003047 items.push_back(item);
3048 }
3049
3050 // Touch first 2 of them on frame 11.
3051 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3052 for(size_t i = 0; i < 2; ++i)
3053 {
3054 VmaAllocationInfo allocInfo;
3055 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3056 }
3057
3058 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3059 size_t lostCount = 0xDEADC0DE;
3060 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003061 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003062
3063 // Make another call. Now 0 should be lost.
3064 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003065 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003066
3067 // Make another call, with null count. Should not crash.
3068 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3069
3070 // END: Free all remaining items.
3071 for(size_t i = items.size(); i--; )
3072 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3073
3074 items.clear();
3075
Adam Sawickid2924172018-06-11 12:48:46 +02003076 ////////////////////////////////////////////////////////////////////////////////
3077 // Test for allocation too large for pool
3078
3079 {
3080 VmaAllocationCreateInfo allocCreateInfo = {};
3081 allocCreateInfo.pool = pool;
3082
3083 VkMemoryRequirements memReq;
3084 memReq.memoryTypeBits = UINT32_MAX;
3085 memReq.alignment = 1;
3086 memReq.size = poolCreateInfo.blockSize + 4;
3087
3088 VmaAllocation alloc = nullptr;
3089 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003090 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003091 }
3092
Adam Sawickib8333fb2018-03-13 16:15:53 +01003093 vmaDestroyPool(g_hAllocator, pool);
3094}
3095
// Returns true if every one of `size` bytes starting at pMemory equals `pattern`.
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* p = static_cast<const uint8_t*>(pMemory);
    const uint8_t* const pEnd = p + size;
    while(p != pEnd)
    {
        if(*p++ != pattern)
            return false;
    }
    return true;
}
3108
3109static void TestAllocationsInitialization()
3110{
3111 VkResult res;
3112
3113 const size_t BUF_SIZE = 1024;
3114
3115 // Create pool.
3116
3117 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3118 bufInfo.size = BUF_SIZE;
3119 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3120
3121 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3122 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3123
3124 VmaPoolCreateInfo poolCreateInfo = {};
3125 poolCreateInfo.blockSize = BUF_SIZE * 10;
3126 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3127 poolCreateInfo.maxBlockCount = 1;
3128 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003129 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003130
3131 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3132 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003133 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003134
3135 // Create one persistently mapped buffer to keep memory of this block mapped,
3136 // so that pointer to mapped data will remain (more or less...) valid even
3137 // after destruction of other allocations.
3138
3139 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3140 VkBuffer firstBuf;
3141 VmaAllocation firstAlloc;
3142 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003143 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003144
3145 // Test buffers.
3146
3147 for(uint32_t i = 0; i < 2; ++i)
3148 {
3149 const bool persistentlyMapped = i == 0;
3150 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3151 VkBuffer buf;
3152 VmaAllocation alloc;
3153 VmaAllocationInfo allocInfo;
3154 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003155 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003156
3157 void* pMappedData;
3158 if(!persistentlyMapped)
3159 {
3160 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003161 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003162 }
3163 else
3164 {
3165 pMappedData = allocInfo.pMappedData;
3166 }
3167
3168 // Validate initialized content
3169 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003170 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003171
3172 if(!persistentlyMapped)
3173 {
3174 vmaUnmapMemory(g_hAllocator, alloc);
3175 }
3176
3177 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3178
3179 // Validate freed content
3180 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003181 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003182 }
3183
3184 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3185 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3186}
3187
Adam Sawickib8333fb2018-03-13 16:15:53 +01003188static void TestPool_Benchmark(
3189 PoolTestResult& outResult,
3190 const PoolTestConfig& config)
3191{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003192 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003193
3194 RandomNumberGenerator mainRand{config.RandSeed};
3195
3196 uint32_t allocationSizeProbabilitySum = std::accumulate(
3197 config.AllocationSizes.begin(),
3198 config.AllocationSizes.end(),
3199 0u,
3200 [](uint32_t sum, const AllocationSize& allocSize) {
3201 return sum + allocSize.Probability;
3202 });
3203
3204 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3205 bufferInfo.size = 256; // Whatever.
3206 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3207
3208 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3209 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3210 imageInfo.extent.width = 256; // Whatever.
3211 imageInfo.extent.height = 256; // Whatever.
3212 imageInfo.extent.depth = 1;
3213 imageInfo.mipLevels = 1;
3214 imageInfo.arrayLayers = 1;
3215 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3216 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3217 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3218 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3219 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3220
3221 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3222 {
3223 VkBuffer dummyBuffer;
3224 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003225 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003226
3227 VkMemoryRequirements memReq;
3228 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3229 bufferMemoryTypeBits = memReq.memoryTypeBits;
3230
3231 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3232 }
3233
3234 uint32_t imageMemoryTypeBits = UINT32_MAX;
3235 {
3236 VkImage dummyImage;
3237 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003238 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003239
3240 VkMemoryRequirements memReq;
3241 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3242 imageMemoryTypeBits = memReq.memoryTypeBits;
3243
3244 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3245 }
3246
3247 uint32_t memoryTypeBits = 0;
3248 if(config.UsesBuffers() && config.UsesImages())
3249 {
3250 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3251 if(memoryTypeBits == 0)
3252 {
3253 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3254 return;
3255 }
3256 }
3257 else if(config.UsesBuffers())
3258 memoryTypeBits = bufferMemoryTypeBits;
3259 else if(config.UsesImages())
3260 memoryTypeBits = imageMemoryTypeBits;
3261 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003262 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003263
3264 VmaPoolCreateInfo poolCreateInfo = {};
3265 poolCreateInfo.memoryTypeIndex = 0;
3266 poolCreateInfo.minBlockCount = 1;
3267 poolCreateInfo.maxBlockCount = 1;
3268 poolCreateInfo.blockSize = config.PoolSize;
3269 poolCreateInfo.frameInUseCount = 1;
3270
3271 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3272 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3273 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3274
3275 VmaPool pool;
3276 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003277 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003278
3279 // Start time measurement - after creating pool and initializing data structures.
3280 time_point timeBeg = std::chrono::high_resolution_clock::now();
3281
3282 ////////////////////////////////////////////////////////////////////////////////
3283 // ThreadProc
3284 auto ThreadProc = [&](
3285 PoolTestThreadResult* outThreadResult,
3286 uint32_t randSeed,
3287 HANDLE frameStartEvent,
3288 HANDLE frameEndEvent) -> void
3289 {
3290 RandomNumberGenerator threadRand{randSeed};
3291
3292 outThreadResult->AllocationTimeMin = duration::max();
3293 outThreadResult->AllocationTimeSum = duration::zero();
3294 outThreadResult->AllocationTimeMax = duration::min();
3295 outThreadResult->DeallocationTimeMin = duration::max();
3296 outThreadResult->DeallocationTimeSum = duration::zero();
3297 outThreadResult->DeallocationTimeMax = duration::min();
3298 outThreadResult->AllocationCount = 0;
3299 outThreadResult->DeallocationCount = 0;
3300 outThreadResult->LostAllocationCount = 0;
3301 outThreadResult->LostAllocationTotalSize = 0;
3302 outThreadResult->FailedAllocationCount = 0;
3303 outThreadResult->FailedAllocationTotalSize = 0;
3304
3305 struct Item
3306 {
3307 VkDeviceSize BufferSize;
3308 VkExtent2D ImageSize;
3309 VkBuffer Buf;
3310 VkImage Image;
3311 VmaAllocation Alloc;
3312
3313 VkDeviceSize CalcSizeBytes() const
3314 {
3315 return BufferSize +
3316 ImageSize.width * ImageSize.height * 4;
3317 }
3318 };
3319 std::vector<Item> unusedItems, usedItems;
3320
3321 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3322
3323 // Create all items - all unused, not yet allocated.
3324 for(size_t i = 0; i < threadTotalItemCount; ++i)
3325 {
3326 Item item = {};
3327
3328 uint32_t allocSizeIndex = 0;
3329 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3330 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3331 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3332
3333 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3334 if(allocSize.BufferSizeMax > 0)
3335 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003336 TEST(allocSize.BufferSizeMin > 0);
3337 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003338 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3339 item.BufferSize = allocSize.BufferSizeMin;
3340 else
3341 {
3342 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3343 item.BufferSize = item.BufferSize / 16 * 16;
3344 }
3345 }
3346 else
3347 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003348 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003349 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3350 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3351 else
3352 {
3353 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3354 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3355 }
3356 }
3357
3358 unusedItems.push_back(item);
3359 }
3360
3361 auto Allocate = [&](Item& item) -> VkResult
3362 {
3363 VmaAllocationCreateInfo allocCreateInfo = {};
3364 allocCreateInfo.pool = pool;
3365 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3366 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3367
3368 if(item.BufferSize)
3369 {
3370 bufferInfo.size = item.BufferSize;
3371 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3372 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3373 }
3374 else
3375 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003376 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003377
3378 imageInfo.extent.width = item.ImageSize.width;
3379 imageInfo.extent.height = item.ImageSize.height;
3380 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3381 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3382 }
3383 };
3384
3385 ////////////////////////////////////////////////////////////////////////////////
3386 // Frames
3387 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3388 {
3389 WaitForSingleObject(frameStartEvent, INFINITE);
3390
3391 // Always make some percent of used bufs unused, to choose different used ones.
3392 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3393 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3394 {
3395 size_t index = threadRand.Generate() % usedItems.size();
3396 unusedItems.push_back(usedItems[index]);
3397 usedItems.erase(usedItems.begin() + index);
3398 }
3399
3400 // Determine which bufs we want to use in this frame.
3401 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3402 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003403 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003404 // Move some used to unused.
3405 while(usedBufCount < usedItems.size())
3406 {
3407 size_t index = threadRand.Generate() % usedItems.size();
3408 unusedItems.push_back(usedItems[index]);
3409 usedItems.erase(usedItems.begin() + index);
3410 }
3411 // Move some unused to used.
3412 while(usedBufCount > usedItems.size())
3413 {
3414 size_t index = threadRand.Generate() % unusedItems.size();
3415 usedItems.push_back(unusedItems[index]);
3416 unusedItems.erase(unusedItems.begin() + index);
3417 }
3418
3419 uint32_t touchExistingCount = 0;
3420 uint32_t touchLostCount = 0;
3421 uint32_t createSucceededCount = 0;
3422 uint32_t createFailedCount = 0;
3423
3424 // Touch all used bufs. If not created or lost, allocate.
3425 for(size_t i = 0; i < usedItems.size(); ++i)
3426 {
3427 Item& item = usedItems[i];
3428 // Not yet created.
3429 if(item.Alloc == VK_NULL_HANDLE)
3430 {
3431 res = Allocate(item);
3432 ++outThreadResult->AllocationCount;
3433 if(res != VK_SUCCESS)
3434 {
3435 item.Alloc = VK_NULL_HANDLE;
3436 item.Buf = VK_NULL_HANDLE;
3437 ++outThreadResult->FailedAllocationCount;
3438 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3439 ++createFailedCount;
3440 }
3441 else
3442 ++createSucceededCount;
3443 }
3444 else
3445 {
3446 // Touch.
3447 VmaAllocationInfo allocInfo;
3448 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3449 // Lost.
3450 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3451 {
3452 ++touchLostCount;
3453
3454 // Destroy.
3455 {
3456 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3457 if(item.Buf)
3458 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3459 else
3460 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3461 ++outThreadResult->DeallocationCount;
3462 }
3463 item.Alloc = VK_NULL_HANDLE;
3464 item.Buf = VK_NULL_HANDLE;
3465
3466 ++outThreadResult->LostAllocationCount;
3467 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3468
3469 // Recreate.
3470 res = Allocate(item);
3471 ++outThreadResult->AllocationCount;
3472 // Creation failed.
3473 if(res != VK_SUCCESS)
3474 {
3475 ++outThreadResult->FailedAllocationCount;
3476 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3477 ++createFailedCount;
3478 }
3479 else
3480 ++createSucceededCount;
3481 }
3482 else
3483 ++touchExistingCount;
3484 }
3485 }
3486
3487 /*
3488 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3489 randSeed, frameIndex,
3490 touchExistingCount, touchLostCount,
3491 createSucceededCount, createFailedCount);
3492 */
3493
3494 SetEvent(frameEndEvent);
3495 }
3496
3497 // Free all remaining items.
3498 for(size_t i = usedItems.size(); i--; )
3499 {
3500 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3501 if(usedItems[i].Buf)
3502 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3503 else
3504 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3505 ++outThreadResult->DeallocationCount;
3506 }
3507 for(size_t i = unusedItems.size(); i--; )
3508 {
3509 PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
3510 if(unusedItems[i].Buf)
3511 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3512 else
3513 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3514 ++outThreadResult->DeallocationCount;
3515 }
3516 };
3517
3518 // Launch threads.
3519 uint32_t threadRandSeed = mainRand.Generate();
3520 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3521 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3522 std::vector<std::thread> bkgThreads;
3523 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3524 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3525 {
3526 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3527 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3528 bkgThreads.emplace_back(std::bind(
3529 ThreadProc,
3530 &threadResults[threadIndex],
3531 threadRandSeed + threadIndex,
3532 frameStartEvents[threadIndex],
3533 frameEndEvents[threadIndex]));
3534 }
3535
3536 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003537 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003538 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3539 {
3540 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3541 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3542 SetEvent(frameStartEvents[threadIndex]);
3543 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3544 }
3545
3546 // Wait for threads finished
3547 for(size_t i = 0; i < bkgThreads.size(); ++i)
3548 {
3549 bkgThreads[i].join();
3550 CloseHandle(frameEndEvents[i]);
3551 CloseHandle(frameStartEvents[i]);
3552 }
3553 bkgThreads.clear();
3554
3555 // Finish time measurement - before destroying pool.
3556 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3557
3558 vmaDestroyPool(g_hAllocator, pool);
3559
3560 outResult.AllocationTimeMin = duration::max();
3561 outResult.AllocationTimeAvg = duration::zero();
3562 outResult.AllocationTimeMax = duration::min();
3563 outResult.DeallocationTimeMin = duration::max();
3564 outResult.DeallocationTimeAvg = duration::zero();
3565 outResult.DeallocationTimeMax = duration::min();
3566 outResult.LostAllocationCount = 0;
3567 outResult.LostAllocationTotalSize = 0;
3568 outResult.FailedAllocationCount = 0;
3569 outResult.FailedAllocationTotalSize = 0;
3570 size_t allocationCount = 0;
3571 size_t deallocationCount = 0;
3572 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3573 {
3574 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3575 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3576 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3577 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3578 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3579 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3580 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3581 allocationCount += threadResult.AllocationCount;
3582 deallocationCount += threadResult.DeallocationCount;
3583 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3584 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3585 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3586 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3587 }
3588 if(allocationCount)
3589 outResult.AllocationTimeAvg /= allocationCount;
3590 if(deallocationCount)
3591 outResult.DeallocationTimeAvg /= deallocationCount;
3592}
3593
// Returns true if the half-open byte ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) share at least one byte. Two ranges starting at the
// same address are always treated as overlapping, even if empty.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Normalize so that `lo` is the range with the lower start address,
    // then overlap exists iff `lo` extends past the start of `hi`.
    const bool firstIsLower = ptr1 < ptr2;
    char* const lo = firstIsLower ? ptr1 : ptr2;
    const size_t loSize = firstIsLower ? size1 : size2;
    char* const hi = firstIsLower ? ptr2 : ptr1;
    return lo + loSize > hi;
}
3603
// Tests vmaMapMemory/vmaUnmapMemory reference counting and persistently
// mapped allocations (VMA_ALLOCATION_CREATE_MAPPED_BIT). The same scenario
// runs three times: on default memory, on a custom pool, and with dedicated
// allocations.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type of the buffers created in the first (TEST_NORMAL)
    // iteration; reused to create the custom pool in the TEST_POOL iteration.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Must have been filled by a previous iteration.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool; // null for non-pool iterations.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers. They are created unmapped: pMappedData must be null.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0. Touch first and last byte of the 0x10000-byte buffer.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time. Mapping is ref-counted: must return the
        // same pointer as the first map.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1. Its mapped range must not overlap buffer 0's.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times - once per prior map. After the ref count
        // drops to zero, pMappedData must be null again.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped. It must come back with a
        // non-null pMappedData straight from creation.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2. An extra manual map on a persistently mapped
        // allocation must return the same pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2. The persistent mapping remains, so pMappedData
        // must still equal the original pointer.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
3708
// Stress-tests vmaMapMemory/vmaUnmapMemory and persistently mapped
// allocations from 16 concurrent threads, each creating 64 buffers with a
// randomly chosen mapping pattern (see MODE below). Like TestMapping, the
// scenario runs for default memory, a custom pool, and dedicated allocations.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Written by worker threads, read by the main loop to create the pool.
    // NOTE(review): volatile is not a synchronization primitive; this appears
    // benign only because every thread writes the same value - consider
    // std::atomic<uint32_t>.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Filled by the worker threads of a previous iteration.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool; // null for non-pool iterations.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Capture by copy except memTypeIndex, which is shared output.
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Deterministic per-thread RNG: seed is the thread index.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        // Persistent mapping must be available from creation.
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Ref-counted map: second map returns same pointer.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        // One unmap fully unmaps MAP_FOR_MOMENT; MAP_TWO_TIMES
                        // still holds one reference, so the pointer persists.
                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Randomly stall to shuffle thread interleavings.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Destroy all buffers in reverse order, releasing the
                // remaining map reference where one is still held.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
3871
// Writes the CSV column-header line for main test results to `file`.
// The column order must stay in sync with the row format emitted by
// WriteMainTestResult.
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER_LINE =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    fputs(HEADER_LINE, file);
}
3888
3889static void WriteMainTestResult(
3890 FILE* file,
3891 const char* codeDescription,
3892 const char* testDescription,
3893 const Config& config, const Result& result)
3894{
3895 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3896 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3897 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3898 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3899 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3900 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3901 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3902
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003903 std::string currTime;
3904 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003905
3906 fprintf(file,
3907 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003908 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
3909 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003910 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02003911 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01003912 totalTimeSeconds * 1e6f,
3913 allocationTimeMinSeconds * 1e6f,
3914 allocationTimeAvgSeconds * 1e6f,
3915 allocationTimeMaxSeconds * 1e6f,
3916 deallocationTimeMinSeconds * 1e6f,
3917 deallocationTimeAvgSeconds * 1e6f,
3918 deallocationTimeMaxSeconds * 1e6f,
3919 result.TotalMemoryAllocated,
3920 result.FreeRangeSizeAvg,
3921 result.FreeRangeSizeMax);
3922}
3923
// Writes the CSV column-header line for pool test results to `file`.
// The column order must stay in sync with the row format emitted by
// WritePoolTestResult.
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER_LINE =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fputs(HEADER_LINE, file);
}
3941
3942static void WritePoolTestResult(
3943 FILE* file,
3944 const char* codeDescription,
3945 const char* testDescription,
3946 const PoolTestConfig& config,
3947 const PoolTestResult& result)
3948{
3949 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3950 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3951 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3952 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3953 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3954 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3955 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3956
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003957 std::string currTime;
3958 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003959
3960 fprintf(file,
3961 "%s,%s,%s,"
3962 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
3963 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
3964 // General
3965 codeDescription,
3966 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003967 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01003968 // Config
3969 config.ThreadCount,
3970 (unsigned long long)config.PoolSize,
3971 config.FrameCount,
3972 config.TotalItemCount,
3973 config.UsedItemCountMin,
3974 config.UsedItemCountMax,
3975 config.ItemsToMakeUnusedPercent,
3976 // Results
3977 totalTimeSeconds * 1e6f,
3978 allocationTimeMinSeconds * 1e6f,
3979 allocationTimeAvgSeconds * 1e6f,
3980 allocationTimeMaxSeconds * 1e6f,
3981 deallocationTimeMinSeconds * 1e6f,
3982 deallocationTimeAvgSeconds * 1e6f,
3983 deallocationTimeMaxSeconds * 1e6f,
3984 result.LostAllocationCount,
3985 result.LostAllocationTotalSize,
3986 result.FailedAllocationCount,
3987 result.FailedAllocationTotalSize);
3988}
3989
3990static void PerformCustomMainTest(FILE* file)
3991{
3992 Config config{};
3993 config.RandSeed = 65735476;
3994 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
3995 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
3996 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
3997 config.FreeOrder = FREE_ORDER::FORWARD;
3998 config.ThreadCount = 16;
3999 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004000 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004001
4002 // Buffers
4003 //config.AllocationSizes.push_back({4, 16, 1024});
4004 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4005
4006 // Images
4007 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4008 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4009
4010 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4011 config.AdditionalOperationCount = 1024;
4012
4013 Result result{};
4014 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004015 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004016 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4017}
4018
4019static void PerformCustomPoolTest(FILE* file)
4020{
4021 PoolTestConfig config;
4022 config.PoolSize = 100 * 1024 * 1024;
4023 config.RandSeed = 2345764;
4024 config.ThreadCount = 1;
4025 config.FrameCount = 200;
4026 config.ItemsToMakeUnusedPercent = 2;
4027
4028 AllocationSize allocSize = {};
4029 allocSize.BufferSizeMin = 1024;
4030 allocSize.BufferSizeMax = 1024 * 1024;
4031 allocSize.Probability = 1;
4032 config.AllocationSizes.push_back(allocSize);
4033
4034 allocSize.BufferSizeMin = 0;
4035 allocSize.BufferSizeMax = 0;
4036 allocSize.ImageSizeMin = 128;
4037 allocSize.ImageSizeMax = 1024;
4038 allocSize.Probability = 1;
4039 config.AllocationSizes.push_back(allocSize);
4040
4041 config.PoolSize = config.CalcAvgResourceSize() * 200;
4042 config.UsedItemCountMax = 160;
4043 config.TotalItemCount = config.UsedItemCountMax * 10;
4044 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4045
4046 g_MemoryAliasingWarningEnabled = false;
4047 PoolTestResult result = {};
4048 TestPool_Benchmark(result, config);
4049 g_MemoryAliasingWarningEnabled = true;
4050
4051 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4052}
4053
Adam Sawickib8333fb2018-03-13 16:15:53 +01004054static void PerformMainTests(FILE* file)
4055{
4056 uint32_t repeatCount = 1;
4057 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4058
4059 Config config{};
4060 config.RandSeed = 65735476;
4061 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4062 config.FreeOrder = FREE_ORDER::FORWARD;
4063
4064 size_t threadCountCount = 1;
4065 switch(ConfigType)
4066 {
4067 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4068 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4069 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4070 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4071 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4072 default: assert(0);
4073 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004074
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004075 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004076
Adam Sawickib8333fb2018-03-13 16:15:53 +01004077 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4078 {
4079 std::string desc1;
4080
4081 switch(threadCountIndex)
4082 {
4083 case 0:
4084 desc1 += "1_thread";
4085 config.ThreadCount = 1;
4086 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4087 break;
4088 case 1:
4089 desc1 += "16_threads+0%_common";
4090 config.ThreadCount = 16;
4091 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4092 break;
4093 case 2:
4094 desc1 += "16_threads+50%_common";
4095 config.ThreadCount = 16;
4096 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4097 break;
4098 case 3:
4099 desc1 += "16_threads+100%_common";
4100 config.ThreadCount = 16;
4101 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4102 break;
4103 case 4:
4104 desc1 += "2_threads+0%_common";
4105 config.ThreadCount = 2;
4106 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4107 break;
4108 case 5:
4109 desc1 += "2_threads+50%_common";
4110 config.ThreadCount = 2;
4111 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4112 break;
4113 case 6:
4114 desc1 += "2_threads+100%_common";
4115 config.ThreadCount = 2;
4116 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4117 break;
4118 default:
4119 assert(0);
4120 }
4121
4122 // 0 = buffers, 1 = images, 2 = buffers and images
4123 size_t buffersVsImagesCount = 2;
4124 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4125 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4126 {
4127 std::string desc2 = desc1;
4128 switch(buffersVsImagesIndex)
4129 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004130 case 0: desc2 += ",Buffers"; break;
4131 case 1: desc2 += ",Images"; break;
4132 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004133 default: assert(0);
4134 }
4135
4136 // 0 = small, 1 = large, 2 = small and large
4137 size_t smallVsLargeCount = 2;
4138 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4139 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4140 {
4141 std::string desc3 = desc2;
4142 switch(smallVsLargeIndex)
4143 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004144 case 0: desc3 += ",Small"; break;
4145 case 1: desc3 += ",Large"; break;
4146 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004147 default: assert(0);
4148 }
4149
4150 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4151 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4152 else
4153 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4154
4155 // 0 = varying sizes min...max, 1 = set of constant sizes
4156 size_t constantSizesCount = 1;
4157 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4158 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4159 {
4160 std::string desc4 = desc3;
4161 switch(constantSizesIndex)
4162 {
4163 case 0: desc4 += " Varying_sizes"; break;
4164 case 1: desc4 += " Constant_sizes"; break;
4165 default: assert(0);
4166 }
4167
4168 config.AllocationSizes.clear();
4169 // Buffers present
4170 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4171 {
4172 // Small
4173 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4174 {
4175 // Varying size
4176 if(constantSizesIndex == 0)
4177 config.AllocationSizes.push_back({4, 16, 1024});
4178 // Constant sizes
4179 else
4180 {
4181 config.AllocationSizes.push_back({1, 16, 16});
4182 config.AllocationSizes.push_back({1, 64, 64});
4183 config.AllocationSizes.push_back({1, 256, 256});
4184 config.AllocationSizes.push_back({1, 1024, 1024});
4185 }
4186 }
4187 // Large
4188 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4189 {
4190 // Varying size
4191 if(constantSizesIndex == 0)
4192 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4193 // Constant sizes
4194 else
4195 {
4196 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4197 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4198 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4199 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4200 }
4201 }
4202 }
4203 // Images present
4204 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4205 {
4206 // Small
4207 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4208 {
4209 // Varying size
4210 if(constantSizesIndex == 0)
4211 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4212 // Constant sizes
4213 else
4214 {
4215 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4216 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4217 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4218 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4219 }
4220 }
4221 // Large
4222 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4223 {
4224 // Varying size
4225 if(constantSizesIndex == 0)
4226 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4227 // Constant sizes
4228 else
4229 {
4230 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4231 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4232 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4233 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4234 }
4235 }
4236 }
4237
4238 // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
4239 size_t beginBytesToAllocateCount = 1;
4240 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4241 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4242 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4243 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4244 {
4245 std::string desc5 = desc4;
4246
4247 switch(beginBytesToAllocateIndex)
4248 {
4249 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004250 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004251 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4252 config.AdditionalOperationCount = 0;
4253 break;
4254 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004255 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004256 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4257 config.AdditionalOperationCount = 1024;
4258 break;
4259 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004260 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004261 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4262 config.AdditionalOperationCount = 1024;
4263 break;
4264 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004265 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004266 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4267 config.AdditionalOperationCount = 1024;
4268 break;
4269 default:
4270 assert(0);
4271 }
4272
Adam Sawicki0667e332018-08-24 17:26:44 +02004273 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004274 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004275 std::string desc6 = desc5;
4276 switch(strategyIndex)
4277 {
4278 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004279 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004280 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4281 break;
4282 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004283 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004284 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4285 break;
4286 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004287 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004288 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4289 break;
4290 default:
4291 assert(0);
4292 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004293
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004294 desc6 += ',';
4295 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004296
4297 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004298
4299 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4300 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004301 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004302
4303 Result result{};
4304 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004305 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004306 if(file)
4307 {
4308 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4309 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004310 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004311 }
4312 }
4313 }
4314 }
4315 }
4316 }
4317}
4318
// Runs the full matrix of custom-pool benchmark configurations and writes one
// CSV row per configuration to `file` via WritePoolTestResult().
// The matrix is the cross product of five axes: thread count, resource kind
// (buffers / images / both), size class (small / large / both), varying vs.
// constant allocation sizes, and pool subscription level. How many variants
// of each axis are exercised grows with the compile-time `ConfigType` setting.
static void PerformPoolTests(FILE* file)
{
    // Pool size is derived below so the pool holds ~this many average-sized resources.
    const size_t AVG_RESOURCES_PER_POOL = 300;

    // Only the heaviest config repeats each benchmark to average out noise.
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    // Fields set here are common to every generated configuration.
    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    // Axis 1: thread count. Index -> {1, 16, 2} threads (see switch below).
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        // desc1..desc5 accumulate a human-readable test description, one axis per level.
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // Axis 2: 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // Axis 3: 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                // NOTE(review): this PoolSize assignment appears to be dead code —
                // it is unconditionally overwritten below with
                // `avgResourceSize * AVG_RESOURCES_PER_POOL` (the inner loop always
                // runs at least once) before PoolSize is ever read. Confirm and remove.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // Axis 4: 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the allocation-size table for this (kind, size-class, constness) combo.
                    // AllocationSize entries: {Probability, BufferSizeMin, BufferSizeMax[, ImageSizeMin, ImageSizeMax]}.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present (image entries use the 5-field form; buffer sizes are 0).
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Size the pool to hold AVG_RESOURCES_PER_POOL average-sized resources.
                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // Axis 5: pool subscription level relative to capacity.
                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        // Keep 5x as many items total as can be in use; churn between 80%..100% of max.
                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            // Aliasing warnings are expected noise during this benchmark;
                            // suppress them for its duration only.
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}
4544
Adam Sawickia83793a2018-09-03 13:40:42 +02004545static void BasicTestBuddyAllocator()
4546{
4547 wprintf(L"Basic test buddy allocator\n");
4548
4549 RandomNumberGenerator rand{76543};
4550
4551 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4552 sampleBufCreateInfo.size = 1024; // Whatever.
4553 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4554
4555 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4556 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4557
4558 VmaPoolCreateInfo poolCreateInfo = {};
4559 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004560 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004561
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004562 // Deliberately adding 1023 to test usable size smaller than memory block size.
4563 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004564 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004565 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004566
4567 VmaPool pool = nullptr;
4568 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004569 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004570
4571 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4572
4573 VmaAllocationCreateInfo allocCreateInfo = {};
4574 allocCreateInfo.pool = pool;
4575
4576 std::vector<BufferInfo> bufInfo;
4577 BufferInfo newBufInfo;
4578 VmaAllocationInfo allocInfo;
4579
4580 bufCreateInfo.size = 1024 * 256;
4581 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4582 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004583 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004584 bufInfo.push_back(newBufInfo);
4585
4586 bufCreateInfo.size = 1024 * 512;
4587 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4588 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004589 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004590 bufInfo.push_back(newBufInfo);
4591
4592 bufCreateInfo.size = 1024 * 128;
4593 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4594 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004595 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004596 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004597
4598 // Test very small allocation, smaller than minimum node size.
4599 bufCreateInfo.size = 1;
4600 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4601 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004602 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004603 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004604
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004605 // Test some small allocation with alignment requirement.
4606 {
4607 VkMemoryRequirements memReq;
4608 memReq.alignment = 256;
4609 memReq.memoryTypeBits = UINT32_MAX;
4610 memReq.size = 32;
4611
4612 newBufInfo.Buffer = VK_NULL_HANDLE;
4613 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4614 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004615 TEST(res == VK_SUCCESS);
4616 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004617 bufInfo.push_back(newBufInfo);
4618 }
4619
4620 //SaveAllocatorStatsToFile(L"TEST.json");
4621
Adam Sawicki21017c62018-09-07 15:26:59 +02004622 VmaPoolStats stats = {};
4623 vmaGetPoolStats(g_hAllocator, pool, &stats);
4624 int DBG = 0; // Set breakpoint here to inspect `stats`.
4625
Adam Sawicki80927152018-09-07 17:27:23 +02004626 // Allocate enough new buffers to surely fall into second block.
4627 for(uint32_t i = 0; i < 32; ++i)
4628 {
4629 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4630 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4631 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004632 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004633 bufInfo.push_back(newBufInfo);
4634 }
4635
4636 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4637
Adam Sawickia83793a2018-09-03 13:40:42 +02004638 // Destroy the buffers in random order.
4639 while(!bufInfo.empty())
4640 {
4641 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4642 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4643 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4644 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4645 }
4646
4647 vmaDestroyPool(g_hAllocator, pool);
4648}
4649
Adam Sawickif2975342018-10-16 13:49:02 +02004650// Test the testing environment.
4651static void TestGpuData()
4652{
4653 RandomNumberGenerator rand = { 53434 };
4654
4655 std::vector<AllocInfo> allocInfo;
4656
4657 for(size_t i = 0; i < 100; ++i)
4658 {
4659 AllocInfo info = {};
4660
4661 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4662 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4663 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4664 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4665 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
4666
4667 VmaAllocationCreateInfo allocCreateInfo = {};
4668 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4669
4670 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
4671 TEST(res == VK_SUCCESS);
4672
4673 info.m_StartValue = rand.Generate();
4674
4675 allocInfo.push_back(std::move(info));
4676 }
4677
4678 UploadGpuData(allocInfo.data(), allocInfo.size());
4679
4680 ValidateGpuData(allocInfo.data(), allocInfo.size());
4681
4682 DestroyAllAllocations(allocInfo);
4683}
4684
// Top-level entry point for the whole test suite. Runs the simple unit tests,
// the algorithm benchmark (Algorithms.csv), the defragmentation tests, and the
// detailed main/pool benchmark matrices (Results.csv).
void Test()
{
    wprintf(L"TESTING:\n");

    // NOTE(review): while this condition is `if(true)`, only the custom tests
    // below run and everything after the early `return` is unreachable.
    // This is a deliberate temporary development switch — set to false (or
    // remove) to run the full suite.
    if(true)
    {
        // # Temporarily insert custom tests here
        // ########################################
        // ########################################

        TestDefragmentationGpu();
        TestDefragmentationSimple();
        TestDefragmentationFull();
        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    // Margin-enabled builds run only the margin test; the pool/heap tests
    // are skipped because margins change allocation sizes.
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMapping();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();

    // Benchmark the allocation algorithms and dump results to a CSV file.
    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationGpu();

    // # Detailed tests
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}
4751
Adam Sawickif1a793c2018-03-13 15:42:22 +01004752#endif // #ifdef _WIN32