blob: cc2fb3ec70c39f6a365c8ecd751b80fb3b4309e1 [file] [log] [blame]
Adam Sawickif1a793c2018-03-13 15:42:22 +01001#include "Tests.h"
2#include "VmaUsage.h"
3#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004#include <atomic>
5#include <thread>
6#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +01007
8#ifdef _WIN32
9
Adam Sawicki33d2ce72018-08-27 13:59:13 +020010static const char* CODE_DESCRIPTION = "Foo";
11
Adam Sawickif2975342018-10-16 13:49:02 +020012extern VkCommandBuffer g_hTemporaryCommandBuffer;
13void BeginSingleTimeCommands();
14void EndSingleTimeCommands();
15
// Overall "size" of the test run; selects how exhaustive individual tests are
// (e.g. how many allocation strategies GetAllocationStrategyCount exercises).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};
24
// Currently selected test size. Swap the comment to the LARGE line for heavier runs.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020027
// Order in which remaining allocations are freed at the end of a test.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Printable names indexed by FREE_ORDER value (COUNT intentionally excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
35
Adam Sawicki80927152018-09-07 17:27:23 +020036// Copy of internal VmaAlgorithmToStr.
37static const char* AlgorithmToStr(uint32_t algorithm)
38{
39 switch(algorithm)
40 {
41 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
42 return "Linear";
43 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
44 return "Buddy";
45 case 0:
46 return "Default";
47 default:
48 assert(0);
49 return "";
50 }
51}
52
// One weighted size class for randomly generated resources. A class describes
// either buffers (BufferSizeMax > 0) or square images (otherwise) — not both.
struct AllocationSize
{
    uint32_t Probability; // Relative weight for the weighted-random pick.
    VkDeviceSize BufferSizeMin, BufferSizeMax; // Buffer byte-size range; Max == 0 means this is an image class.
    uint32_t ImageSizeMin, ImageSizeMax; // Image width/height range in pixels.
};
59
// Parameters of the main multi-threaded stress test (MainTest).
struct Config
{
    uint32_t RandSeed; // Seed for the main random number generator.
    VkDeviceSize BeginBytesToAllocate; // Bytes allocated up front, split evenly across threads.
    uint32_t AdditionalOperationCount; // Random alloc/free operations after the initial phase, split across threads.
    VkDeviceSize MaxBytesToAllocate; // Per-run ceiling on held bytes, split evenly across threads.
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes; // Weighted size classes for buffers/images.
    uint32_t ThreadCount; // Number of worker threads; must be > 0.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent; // Chance [0..100] an operation targets the shared allocation list.
    FREE_ORDER FreeOrder; // Order used when freeing remaining allocations.
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
73
// Aggregated output of MainTest.
// Note: the *Avg members accumulate a running SUM during the test; MainTest
// divides them by the allocation count at the end.
struct Result
{
    duration TotalTime; // Wall-clock time of the whole test.
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated; // used + unused bytes from vmaCalculateStats.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax; // Free-range stats from vmaCalculateStats.
};
82
83void TestDefragmentationSimple();
84void TestDefragmentationFull();
85
86struct PoolTestConfig
87{
88 uint32_t RandSeed;
89 uint32_t ThreadCount;
90 VkDeviceSize PoolSize;
91 uint32_t FrameCount;
92 uint32_t TotalItemCount;
93 // Range for number of items used in each frame.
94 uint32_t UsedItemCountMin, UsedItemCountMax;
95 // Percent of items to make unused, and possibly make some others used in each frame.
96 uint32_t ItemsToMakeUnusedPercent;
97 std::vector<AllocationSize> AllocationSizes;
98
99 VkDeviceSize CalcAvgResourceSize() const
100 {
101 uint32_t probabilitySum = 0;
102 VkDeviceSize sizeSum = 0;
103 for(size_t i = 0; i < AllocationSizes.size(); ++i)
104 {
105 const AllocationSize& allocSize = AllocationSizes[i];
106 if(allocSize.BufferSizeMax > 0)
107 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
108 else
109 {
110 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
111 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
112 }
113 probabilitySum += allocSize.Probability;
114 }
115 return sizeSum / probabilitySum;
116 }
117
118 bool UsesBuffers() const
119 {
120 for(size_t i = 0; i < AllocationSizes.size(); ++i)
121 if(AllocationSizes[i].BufferSizeMax > 0)
122 return true;
123 return false;
124 }
125
126 bool UsesImages() const
127 {
128 for(size_t i = 0; i < AllocationSizes.size(); ++i)
129 if(AllocationSizes[i].ImageSizeMax > 0)
130 return true;
131 return false;
132 }
133};
134
// Aggregated output of the custom-pool stress test.
struct PoolTestResult
{
    duration TotalTime; // Wall-clock time of the whole test.
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize; // Allocations VMA reported as lost.
    size_t FailedAllocationCount, FailedAllocationTotalSize; // Allocations that failed outright.
};
143
// Assumed byte size of one image pixel when estimating memory consumed by a
// randomly generated image.
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame counter shared by tests. Incremented/used outside this chunk —
// NOTE(review): its exact semantics are not visible here.
static uint32_t g_FrameIndex = 0;

// A buffer handle paired with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
153
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200154static uint32_t GetAllocationStrategyCount()
155{
156 uint32_t strategyCount = 0;
157 switch(ConfigType)
158 {
159 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
160 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
161 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
162 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
163 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
164 default: assert(0);
165 }
166 return strategyCount;
167}
168
169static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
170{
171 switch(allocStrategy)
172 {
173 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
174 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
175 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
176 case 0: return "Default"; break;
177 default: assert(0); return "";
178 }
179}
180
Adam Sawickib8333fb2018-03-13 16:15:53 +0100181static void InitResult(Result& outResult)
182{
183 outResult.TotalTime = duration::zero();
184 outResult.AllocationTimeMin = duration::max();
185 outResult.AllocationTimeAvg = duration::zero();
186 outResult.AllocationTimeMax = duration::min();
187 outResult.DeallocationTimeMin = duration::max();
188 outResult.DeallocationTimeAvg = duration::zero();
189 outResult.DeallocationTimeMax = duration::min();
190 outResult.TotalMemoryAllocated = 0;
191 outResult.FreeRangeSizeAvg = 0;
192 outResult.FreeRangeSizeMax = 0;
193}
194
// RAII timer: measures the interval between its construction and destruction
// and folds it into the referenced min / sum / max accumulators. The min/max
// references are expected to start at duration::max()/duration::min()
// respectively (see InitResult).
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg; // Construction timestamp.
};
220
// Per-thread accumulators gathered during the pool test; later merged into a
// PoolTestResult.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
229
// Times one allocation in MainTest. Note: Result::AllocationTimeAvg is used as
// a running sum here; MainTest divides it by the count at the end.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

// Times one deallocation in MainTest (DeallocationTimeAvg likewise holds a sum).
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

// Times one allocation in the pool test, accumulating into the thread's result.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

// Times one deallocation in the pool test, accumulating into the thread's result.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
265
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200266static void CurrentTimeToStr(std::string& out)
267{
268 time_t rawTime; time(&rawTime);
269 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
270 char timeStr[128];
271 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
272 out = timeStr;
273}
274
// Main multi-threaded stress test. Spawns config.ThreadCount worker threads
// that allocate random buffers/images through VMA up to a per-thread budget,
// perform random extra alloc/free operations, then (after the main thread has
// sampled allocator statistics) free everything in config.FreeOrder.
// Timing min/sum/max and memory statistics are accumulated into outResult.
// Returns the last VkResult produced by an allocation call.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    // Successful allocations across all threads; divides the Avg sums at the end.
    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    // Total weight for the weighted-random choice of VMA_MEMORY_USAGE_*.
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    // Total weight for the weighted-random choice of an AllocationSize class.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // One created resource: exactly one of Buffer/Image is non-null.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads; guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (non-zero extent) with a
    // weighted-random memory usage, times the creation, and appends the result
    // either to the shared list or to the caller's per-thread list.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer / image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted-random pick of the VMA_MEMORY_USAGE_* index.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // Non-GPU-only memory gets linear tiling.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Pick usage flags compatible with the chosen memory usage.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            // Randomly publish the allocation to the shared list so other
            // threads may free it later.
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            // Any allocation failure fails the whole test.
            TEST(0);
        }
        return res;
    };

    // Draws a random AllocationSize class and produces either a buffer size or
    // a square-ish image extent (exactly one of the two outputs is non-zero).
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        // Weighted-random pick of the size class.
        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                // Round down to a multiple of 16 bytes.
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    // Number of workers that finished their allocation phases.
    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset Win32 event; signaled by the main thread once statistics
    // have been sampled, releasing workers into their deallocation phase.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Budgets are split evenly among threads.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS: allocate until the initial per-thread budget is reached.
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES: random mix, bounded by threadMaxBytesToAllocate.
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation, either from the shared list (under
                // the mutex) or from this thread's private list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Only free if this thread's byte counter can absorb it
                        // (the allocation may have been created by another thread).
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Hold all allocations until the main thread has sampled statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION: free this thread's remaining allocations in config.FreeOrder.
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared by all worker threads here
                // without synchronization — looks like a data race; confirm
                // RandomNumberGenerator is thread-safe or that this is intended.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    // Launch workers, each with a distinct seed derived from the main generator.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE (while everything is still allocated).
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources (single-threaded now, no lock needed).
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Convert the accumulated timing sums into averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
641
Adam Sawickie44c6262018-06-15 14:30:39 +0200642static void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100643{
644 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200645 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100646 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200647 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100648}
649
// One test resource: a VMA allocation backing either a buffer or an image,
// together with its creation info and the seed of the data pattern written
// into it (consecutive uint32_t values starting at m_StartValue).
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0; // First value of the fill pattern.
    // Which member is meaningful depends on whether m_Buffer or m_Image is set.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // Creates the buffer + allocation and stores bufCreateInfo in m_BufferInfo.
    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    // Destroys whichever resource is present and frees the allocation.
    void Destroy();
};
667
// Creates a buffer with its VMA allocation from the given create infos and
// remembers bufCreateInfo in m_BufferInfo. Fails the test on any Vulkan error.
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
676
677void AllocInfo::Destroy()
678{
679 if(m_Image)
680 {
681 vkDestroyImage(g_hDevice, m_Image, nullptr);
682 }
683 if(m_Buffer)
684 {
685 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
686 }
687 if(m_Allocation)
688 {
689 vmaFreeMemory(g_hAllocator, m_Allocation);
690 }
691}
692
// Pool of persistently mapped CPU staging buffers used by UploadGpuData and
// ValidateGpuData. Buffers are reused across Acquire/Release cycles; the
// combined size of all buffers (used and unused) is capped at MAX_TOTAL_SIZE.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks all buffers unused again without destroying them.
    void ReleaseAllBuffers();

private:
    // Budget for the summed size of all staging buffers: 256 MiB.
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr; // Persistently mapped host pointer.
        bool Used = false;         // Currently handed out via AcquireBuffer.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
716
717StagingBufferCollection::~StagingBufferCollection()
718{
719 for(size_t i = m_Bufs.size(); i--; )
720 {
721 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
722 }
723}
724
// Hands out a mapped staging buffer of at least `size` bytes.
// Strategy: (1) reuse the smallest unused buffer that fits; (2) otherwise
// create a new buffer if the MAX_TOTAL_SIZE budget allows; (3) otherwise free
// all unused (too-small) buffers and retry. Returns false only when even the
// retry cannot fit the request into the budget.
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best (smallest sufficient) size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // CPU-only, persistently mapped memory so MappedPtr stays valid for
        // the lifetime of the buffer.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Iterate backwards so erase() does not shift not-yet-visited elements.
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Recursion terminates: after this pass no unused buffers remain.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
806
807void StagingBufferCollection::ReleaseAllBuffers()
808{
809 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
810 {
811 m_Bufs[i].Used = false;
812 }
813}
814
// Fills the buffers described by allocInfo[0..allocInfoCount) with their data
// pattern (consecutive uint32_t values starting at m_StartValue) by writing
// into mapped staging buffers and recording buffer-to-buffer copy commands.
// When the staging pool runs out, pending copies are submitted and the staging
// buffers recycled before continuing. Images fail the test (not supported).
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging pool exhausted: submit the pending copies, recycle
                // all staging buffers, then the acquire must succeed.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit the final batch of copies, if any.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
878
// Reads back every buffer described by allocInfo[0..allocInfoCount) through
// staging buffers and verifies its contents are the consecutive uint32_t
// pattern starting at m_StartValue. Copies are batched: when the staging pool
// runs out, the pending batch is submitted and validated before continuing.
// Images fail the test (not supported).
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    // Index of the first allocation whose readback is still pending validation.
    size_t validateAllocIndexOffset = 0;
    // Mapped staging pointers of the pending batch, parallel to allocations
    // [validateAllocIndexOffset, validateAllocIndexOffset + size()).
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging exhausted: submit the pending readback copies and
                // validate that whole batch now.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                // Start a new pending batch at the current allocation.
                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer (readback).
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit and validate the final batch.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}
985
Adam Sawickib8333fb2018-03-13 16:15:53 +0100986static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
987{
988 outMemReq = {};
989 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
990 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
991}
992
// Creates a buffer in the given pool (optionally persistently mapped), fills
// it with consecutive uint32_t values starting at a random m_StartValue, and
// returns the resulting AllocInfo. Fails the test on any Vulkan error.
static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        // pMappedData must be non-null exactly when MAPPED_BIT was requested.
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            // Map temporarily just for the fill below.
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}
1029
// Creates a randomly-sized allocation (currently always a buffer - the image
// path below is disabled via the hard-coded isBuffer flag), using memory
// parameters from GetMemReq(), and fills its whole memory with a sequential
// uint32_t pattern starting at outAllocation.m_StartValue. Handles and create
// infos are stored in outAllocation for later validation and destruction.
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    // isBuffer is currently forced to true; 1 in 16 allocations is "large".
    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        // Dead branch while isBuffer == true; kept for potential re-enabling.
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    // Fill the whole allocation with the pattern, mapping temporarily if the
    // allocation is not already persistently mapped.
    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}
1100
1101static void DestroyAllocation(const AllocInfo& allocation)
1102{
1103 if(allocation.m_Buffer)
1104 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1105 else
1106 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1107}
1108
1109static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1110{
1111 for(size_t i = allocations.size(); i--; )
1112 DestroyAllocation(allocations[i]);
1113 allocations.clear();
1114}
1115
1116static void ValidateAllocationData(const AllocInfo& allocation)
1117{
1118 VmaAllocationInfo allocInfo;
1119 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1120
1121 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1122 if(allocInfo.pMappedData == nullptr)
1123 {
1124 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001126 }
1127
1128 uint32_t value = allocation.m_StartValue;
1129 bool ok = true;
1130 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001131 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001132 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1133 {
1134 if(data[i] != value++)
1135 {
1136 ok = false;
1137 break;
1138 }
1139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001140 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001141
1142 if(allocInfo.pMappedData == nullptr)
1143 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1144}
1145
// Destroys and recreates the Vulkan buffer/image of the given allocation and
// binds the new handle to the allocation's current memory block and offset.
// Used after defragmentation moved the allocation, which invalidates the
// binding of the previously created resource.
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        // Recreate from the original create info saved in the AllocInfo.
        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size == allocation.m_BufferInfo.size);

        // Bind to the (possibly new) memory the defragmentation moved us to.
        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}
1181
1182static void Defragment(AllocInfo* allocs, size_t allocCount,
1183 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1184 VmaDefragmentationStats* defragmentationStats = nullptr)
1185{
1186 std::vector<VmaAllocation> vmaAllocs(allocCount);
1187 for(size_t i = 0; i < allocCount; ++i)
1188 vmaAllocs[i] = allocs[i].m_Allocation;
1189
1190 std::vector<VkBool32> allocChanged(allocCount);
1191
1192 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1193 defragmentationInfo, defragmentationStats) );
1194
1195 for(size_t i = 0; i < allocCount; ++i)
1196 {
1197 if(allocChanged[i])
1198 {
1199 RecreateAllocationResource(allocs[i]);
1200 }
1201 }
1202}
1203
1204static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1205{
1206 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1207 ValidateAllocationData(allocInfo);
1208 });
1209}
1210
1211void TestDefragmentationSimple()
1212{
1213 wprintf(L"Test defragmentation simple\n");
1214
1215 RandomNumberGenerator rand(667);
1216
1217 const VkDeviceSize BUF_SIZE = 0x10000;
1218 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1219
1220 const VkDeviceSize MIN_BUF_SIZE = 32;
1221 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1222 auto RandomBufSize = [&]() -> VkDeviceSize {
1223 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1224 };
1225
1226 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1227 bufCreateInfo.size = BUF_SIZE;
1228 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1229
1230 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1231 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1232
1233 uint32_t memTypeIndex = UINT32_MAX;
1234 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1235
1236 VmaPoolCreateInfo poolCreateInfo = {};
1237 poolCreateInfo.blockSize = BLOCK_SIZE;
1238 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1239
1240 VmaPool pool;
1241 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1242
1243 std::vector<AllocInfo> allocations;
1244
1245 // persistentlyMappedOption = 0 - not persistently mapped.
1246 // persistentlyMappedOption = 1 - persistently mapped.
1247 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1248 {
1249 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1250 const bool persistentlyMapped = persistentlyMappedOption != 0;
1251
1252 // # Test 1
1253 // Buffers of fixed size.
1254 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1255 // Expected result: at least 1 block freed.
1256 {
1257 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1258 {
1259 AllocInfo allocInfo;
1260 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1261 allocations.push_back(allocInfo);
1262 }
1263
1264 for(size_t i = 1; i < allocations.size(); ++i)
1265 {
1266 DestroyAllocation(allocations[i]);
1267 allocations.erase(allocations.begin() + i);
1268 }
1269
1270 VmaDefragmentationStats defragStats;
1271 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1273 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001274
1275 ValidateAllocationsData(allocations.data(), allocations.size());
1276
1277 DestroyAllAllocations(allocations);
1278 }
1279
1280 // # Test 2
1281 // Buffers of fixed size.
1282 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1283 // Expected result: Each of 4 interations makes some progress.
1284 {
1285 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1286 {
1287 AllocInfo allocInfo;
1288 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1289 allocations.push_back(allocInfo);
1290 }
1291
1292 for(size_t i = 1; i < allocations.size(); ++i)
1293 {
1294 DestroyAllocation(allocations[i]);
1295 allocations.erase(allocations.begin() + i);
1296 }
1297
1298 VmaDefragmentationInfo defragInfo = {};
1299 defragInfo.maxAllocationsToMove = 1;
1300 defragInfo.maxBytesToMove = BUF_SIZE;
1301
1302 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1303 {
1304 VmaDefragmentationStats defragStats;
1305 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001306 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001307 }
1308
1309 ValidateAllocationsData(allocations.data(), allocations.size());
1310
1311 DestroyAllAllocations(allocations);
1312 }
1313
1314 // # Test 3
1315 // Buffers of variable size.
1316 // Create a number of buffers. Remove some percent of them.
1317 // Defragment while having some percent of them unmovable.
1318 // Expected result: Just simple validation.
1319 {
1320 for(size_t i = 0; i < 100; ++i)
1321 {
1322 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1323 localBufCreateInfo.size = RandomBufSize();
1324
1325 AllocInfo allocInfo;
1326 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1327 allocations.push_back(allocInfo);
1328 }
1329
1330 const uint32_t percentToDelete = 60;
1331 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1332 for(size_t i = 0; i < numberToDelete; ++i)
1333 {
1334 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1335 DestroyAllocation(allocations[indexToDelete]);
1336 allocations.erase(allocations.begin() + indexToDelete);
1337 }
1338
1339 // Non-movable allocations will be at the beginning of allocations array.
1340 const uint32_t percentNonMovable = 20;
1341 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1342 for(size_t i = 0; i < numberNonMovable; ++i)
1343 {
1344 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1345 if(indexNonMovable != i)
1346 std::swap(allocations[i], allocations[indexNonMovable]);
1347 }
1348
1349 VmaDefragmentationStats defragStats;
1350 Defragment(
1351 allocations.data() + numberNonMovable,
1352 allocations.size() - numberNonMovable,
1353 nullptr, &defragStats);
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359 }
1360
1361 vmaDestroyPool(g_hAllocator, pool);
1362}
1363
// Stress-test of vmaDefragment: creates 400 random allocations, frees 80% of
// them at random, defragments everything, then recreates moved resources and
// validates that all data patterns survived.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    // Sanity check before defragmentation: all patterns intact.
    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // NOTE(review): with nonMovablePercent > 0, entries are erased from
        // vmaAllocations only, so indices would no longer correspond 1:1 to
        // `allocations`, and allocationsChanged[i] below would be matched with
        // the wrong AllocInfo - confirm before raising this above 0.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits: allow moving everything in one pass.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Rebind resources of every allocation the defragmentation moved.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            // Data must have been preserved across the moves.
            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1447
// Tests GPU-side defragmentation (vmaDefragmentationBegin/End with a command
// buffer): fills ~3 blocks of GPU_ONLY buffers, frees 80% of them, uploads
// known data, defragments using the GPU path only, then validates the data
// survived the moves.
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSize = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSize);
    const size_t percentToLeave = 20;
    RandomNumberGenerator rand = { 234522 };

    // TRANSFER_SRC/DST are required so the defragmentation can copy the
    // buffers on the GPU.
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = bufSize;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
        VK_BUFFER_USAGE_TRANSFER_DST_BIT |
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = "TestDefragmentationGpu";

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_A_before.json");

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs(allocCount);
        std::vector<VkBool32> allocationChanged(allocCount);
        for(size_t i = 0; i < allocCount; ++i)
        {
            allocationPtrs[i] = allocations[i].m_Allocation;
        }
        // NOTE(review): the vector constructor already value-initializes the
        // elements to 0; this memset is redundant but harmless.
        memset(allocationChanged.data(), 0, allocCount * sizeof(VkBool32));

        // Record the defragmentation copies into the shared temporary command
        // buffer and submit them via EndSingleTimeCommands().
        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.allocationCount = (uint32_t)allocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        // Rebind the buffers that were moved.
        for(size_t i = 0; i < allocCount; ++i)
        {
            if(allocationChanged[i])
            {
                RecreateAllocationResource(allocations[i]);
            }
        }

        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        TEST(stats.allocationsLost == 0);
    }

    ValidateGpuData(allocations.data(), allocations.size());

    SaveAllocatorStatsToFile(L"GPU_defragmentation_B_after.json");

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }
}
1551
Adam Sawickib8333fb2018-03-13 16:15:53 +01001552static void TestUserData()
1553{
1554 VkResult res;
1555
1556 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1557 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1558 bufCreateInfo.size = 0x10000;
1559
1560 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1561 {
1562 // Opaque pointer
1563 {
1564
1565 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1566 void* pointerToSomething = &res;
1567
1568 VmaAllocationCreateInfo allocCreateInfo = {};
1569 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1570 allocCreateInfo.pUserData = numberAsPointer;
1571 if(testIndex == 1)
1572 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1573
1574 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1575 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001576 TEST(res == VK_SUCCESS);
1577 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001578
1579 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001580 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001581
1582 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1583 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001584 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001585
1586 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1587 }
1588
1589 // String
1590 {
1591 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1592 const char* name2 = "2";
1593 const size_t name1Len = strlen(name1);
1594
1595 char* name1Buf = new char[name1Len + 1];
1596 strcpy_s(name1Buf, name1Len + 1, name1);
1597
1598 VmaAllocationCreateInfo allocCreateInfo = {};
1599 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1600 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1601 allocCreateInfo.pUserData = name1Buf;
1602 if(testIndex == 1)
1603 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1604
1605 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1606 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001607 TEST(res == VK_SUCCESS);
1608 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1609 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001610
1611 delete[] name1Buf;
1612
1613 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001614 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001615
1616 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1617 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001618 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001619
1620 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1621 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001622 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001623
1624 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1625 }
1626 }
1627}
1628
// Tests the different ways of steering memory type selection in
// VmaAllocationCreateInfo: none, usage, requiredFlags/preferredFlags, and
// memoryTypeBits. The memoryTypeBits case deliberately reuses the memory type
// chosen by the preceding requiredFlags allocation.
static void TestMemoryRequirements()
{
    VkResult res;
    VkBuffer buf;
    VmaAllocation alloc;
    VmaAllocationInfo allocInfo;

    const VkPhysicalDeviceMemoryProperties* memProps;
    vmaGetMemoryProperties(g_hAllocator, &memProps);

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    bufInfo.size = 128;

    VmaAllocationCreateInfo allocCreateInfo = {};

    // No requirements.
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // Usage.
    // CPU_ONLY must land in a HOST_VISIBLE memory type.
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.requiredFlags = 0;
    allocCreateInfo.preferredFlags = 0;
    allocCreateInfo.memoryTypeBits = UINT32_MAX;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // Required flags, preferred flags.
    // requiredFlags are mandatory in the chosen type; preferredFlags are not asserted.
    allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
    allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    allocCreateInfo.memoryTypeBits = 0;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
    TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

    // memoryTypeBits.
    // Restricting to exactly the type chosen above must yield that type again.
    const uint32_t memType = allocInfo.memoryType;
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.requiredFlags = 0;
    allocCreateInfo.preferredFlags = 0;
    allocCreateInfo.memoryTypeBits = 1u << memType;

    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(allocInfo.memoryType == memType);
    vmaDestroyBuffer(g_hAllocator, buf, alloc);

}
1686
1687static void TestBasics()
1688{
1689 VkResult res;
1690
1691 TestMemoryRequirements();
1692
1693 // Lost allocation
1694 {
1695 VmaAllocation alloc = VK_NULL_HANDLE;
1696 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001697 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001698
1699 VmaAllocationInfo allocInfo;
1700 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001701 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1702 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001703
1704 vmaFreeMemory(g_hAllocator, alloc);
1705 }
1706
1707 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1708 {
1709 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1710 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1711 bufCreateInfo.size = 128;
1712
1713 VmaAllocationCreateInfo allocCreateInfo = {};
1714 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1715 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1716
1717 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1718 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001719 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001720
1721 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1722
1723 // Same with OWN_MEMORY.
1724 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1725
1726 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001727 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001728
1729 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1730 }
1731
1732 TestUserData();
1733}
1734
// Tests VmaAllocatorCreateInfo::pHeapSizeLimit: creates a dedicated allocator
// with every heap limited to 1 GB, fills the limited heap exactly with
// dedicated and pool allocations, then verifies that one more allocation
// fails with VK_ERROR_OUT_OF_DEVICE_MEMORY.
void TestHeapSizeLimit()
{
    const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
    const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB

    // Apply the same limit to every heap.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = HEAP_SIZE_LIMIT;
    }

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    struct Item
    {
        VkBuffer hBuf;
        VmaAllocation hAlloc;
    };
    std::vector<Item> items;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
    VmaAllocationInfo ownAllocInfo;
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        for(size_t i = 0; i < 2; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // Create pool to make sure allocations must be out of this memory type.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
    poolCreateInfo.blockSize = BLOCK_SIZE;

    VmaPool hPool;
    res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
    TEST(res == VK_SUCCESS);

    // 2. Allocate normal buffers from all the remaining memory.
    // Two half-block buffers per remaining block fill the heap to its limit.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
        for(size_t i = 0; i < bufCount; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // 3. Allocation of one more (even small) buffer should fail.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = 128;

        VkBuffer hBuf;
        VmaAllocation hAlloc;
        res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
        TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }

    // Destroy everything.
    for(size_t i = items.size(); i--; )
    {
        vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
    }

    vmaDestroyPool(hAllocator, hPool);

    vmaDestroyAllocator(hAllocator);
}
1832
Adam Sawicki212a4a62018-06-14 15:44:45 +02001833#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02001834static void TestDebugMargin()
1835{
1836 if(VMA_DEBUG_MARGIN == 0)
1837 {
1838 return;
1839 }
1840
1841 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02001842 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02001843
1844 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02001845 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02001846
1847 // Create few buffers of different size.
1848 const size_t BUF_COUNT = 10;
1849 BufferInfo buffers[BUF_COUNT];
1850 VmaAllocationInfo allocInfo[BUF_COUNT];
1851 for(size_t i = 0; i < 10; ++i)
1852 {
1853 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02001854 // Last one will be mapped.
1855 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02001856
1857 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001858 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02001859 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001860 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001861
1862 if(i == BUF_COUNT - 1)
1863 {
1864 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02001865 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001866 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
1867 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
1868 }
Adam Sawicki73b16652018-06-11 16:39:25 +02001869 }
1870
1871 // Check if their offsets preserve margin between them.
1872 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
1873 {
1874 if(lhs.deviceMemory != rhs.deviceMemory)
1875 {
1876 return lhs.deviceMemory < rhs.deviceMemory;
1877 }
1878 return lhs.offset < rhs.offset;
1879 });
1880 for(size_t i = 1; i < BUF_COUNT; ++i)
1881 {
1882 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
1883 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02001884 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02001885 }
1886 }
1887
Adam Sawicki212a4a62018-06-14 15:44:45 +02001888 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001889 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02001890
Adam Sawicki73b16652018-06-11 16:39:25 +02001891 // Destroy all buffers.
1892 for(size_t i = BUF_COUNT; i--; )
1893 {
1894 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
1895 }
1896}
Adam Sawicki212a4a62018-06-14 15:44:45 +02001897#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02001898
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001899static void TestLinearAllocator()
1900{
1901 wprintf(L"Test linear allocator\n");
1902
1903 RandomNumberGenerator rand{645332};
1904
1905 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1906 sampleBufCreateInfo.size = 1024; // Whatever.
1907 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
1908
1909 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
1910 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1911
1912 VmaPoolCreateInfo poolCreateInfo = {};
1913 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001914 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001915
Adam Sawickiee082772018-06-20 17:45:49 +02001916 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001917 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
1918 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
1919
1920 VmaPool pool = nullptr;
1921 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001922 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001923
1924 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
1925
1926 VmaAllocationCreateInfo allocCreateInfo = {};
1927 allocCreateInfo.pool = pool;
1928
1929 constexpr size_t maxBufCount = 100;
1930 std::vector<BufferInfo> bufInfo;
1931
1932 constexpr VkDeviceSize bufSizeMin = 16;
1933 constexpr VkDeviceSize bufSizeMax = 1024;
1934 VmaAllocationInfo allocInfo;
1935 VkDeviceSize prevOffset = 0;
1936
1937 // Test one-time free.
1938 for(size_t i = 0; i < 2; ++i)
1939 {
1940 // Allocate number of buffers of varying size that surely fit into this block.
1941 VkDeviceSize bufSumSize = 0;
1942 for(size_t i = 0; i < maxBufCount; ++i)
1943 {
1944 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1945 BufferInfo newBufInfo;
1946 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1947 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001948 TEST(res == VK_SUCCESS);
1949 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001950 bufInfo.push_back(newBufInfo);
1951 prevOffset = allocInfo.offset;
1952 bufSumSize += bufCreateInfo.size;
1953 }
1954
1955 // Validate pool stats.
1956 VmaPoolStats stats;
1957 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001958 TEST(stats.size == poolCreateInfo.blockSize);
1959 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
1960 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001961
1962 // Destroy the buffers in random order.
1963 while(!bufInfo.empty())
1964 {
1965 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
1966 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
1967 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1968 bufInfo.erase(bufInfo.begin() + indexToDestroy);
1969 }
1970 }
1971
1972 // Test stack.
1973 {
1974 // Allocate number of buffers of varying size that surely fit into this block.
1975 for(size_t i = 0; i < maxBufCount; ++i)
1976 {
1977 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1978 BufferInfo newBufInfo;
1979 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
1980 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001981 TEST(res == VK_SUCCESS);
1982 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02001983 bufInfo.push_back(newBufInfo);
1984 prevOffset = allocInfo.offset;
1985 }
1986
1987 // Destroy few buffers from top of the stack.
1988 for(size_t i = 0; i < maxBufCount / 5; ++i)
1989 {
1990 const BufferInfo& currBufInfo = bufInfo.back();
1991 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
1992 bufInfo.pop_back();
1993 }
1994
1995 // Create some more
1996 for(size_t i = 0; i < maxBufCount / 5; ++i)
1997 {
1998 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
1999 BufferInfo newBufInfo;
2000 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2001 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002002 TEST(res == VK_SUCCESS);
2003 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002004 bufInfo.push_back(newBufInfo);
2005 prevOffset = allocInfo.offset;
2006 }
2007
2008 // Destroy the buffers in reverse order.
2009 while(!bufInfo.empty())
2010 {
2011 const BufferInfo& currBufInfo = bufInfo.back();
2012 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2013 bufInfo.pop_back();
2014 }
2015 }
2016
Adam Sawickiee082772018-06-20 17:45:49 +02002017 // Test ring buffer.
2018 {
2019 // Allocate number of buffers that surely fit into this block.
2020 bufCreateInfo.size = bufSizeMax;
2021 for(size_t i = 0; i < maxBufCount; ++i)
2022 {
2023 BufferInfo newBufInfo;
2024 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2025 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002026 TEST(res == VK_SUCCESS);
2027 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002028 bufInfo.push_back(newBufInfo);
2029 prevOffset = allocInfo.offset;
2030 }
2031
2032 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2033 const size_t buffersPerIter = maxBufCount / 10 - 1;
2034 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2035 for(size_t iter = 0; iter < iterCount; ++iter)
2036 {
2037 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2038 {
2039 const BufferInfo& currBufInfo = bufInfo.front();
2040 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2041 bufInfo.erase(bufInfo.begin());
2042 }
2043 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2044 {
2045 BufferInfo newBufInfo;
2046 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2047 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002048 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002049 bufInfo.push_back(newBufInfo);
2050 }
2051 }
2052
2053 // Allocate buffers until we reach out-of-memory.
2054 uint32_t debugIndex = 0;
2055 while(res == VK_SUCCESS)
2056 {
2057 BufferInfo newBufInfo;
2058 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2059 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2060 if(res == VK_SUCCESS)
2061 {
2062 bufInfo.push_back(newBufInfo);
2063 }
2064 else
2065 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002066 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002067 }
2068 ++debugIndex;
2069 }
2070
2071 // Destroy the buffers in random order.
2072 while(!bufInfo.empty())
2073 {
2074 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2075 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2076 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2077 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2078 }
2079 }
2080
Adam Sawicki680b2252018-08-22 14:47:32 +02002081 // Test double stack.
2082 {
2083 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
2084 VkDeviceSize prevOffsetLower = 0;
2085 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2086 for(size_t i = 0; i < maxBufCount; ++i)
2087 {
2088 const bool upperAddress = (i % 2) != 0;
2089 if(upperAddress)
2090 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2091 else
2092 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2093 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2094 BufferInfo newBufInfo;
2095 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2096 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002097 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002098 if(upperAddress)
2099 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002100 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002101 prevOffsetUpper = allocInfo.offset;
2102 }
2103 else
2104 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002106 prevOffsetLower = allocInfo.offset;
2107 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002108 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002109 bufInfo.push_back(newBufInfo);
2110 }
2111
2112 // Destroy few buffers from top of the stack.
2113 for(size_t i = 0; i < maxBufCount / 5; ++i)
2114 {
2115 const BufferInfo& currBufInfo = bufInfo.back();
2116 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2117 bufInfo.pop_back();
2118 }
2119
2120 // Create some more
2121 for(size_t i = 0; i < maxBufCount / 5; ++i)
2122 {
2123 const bool upperAddress = (i % 2) != 0;
2124 if(upperAddress)
2125 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2126 else
2127 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2128 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2129 BufferInfo newBufInfo;
2130 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2131 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002132 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002133 bufInfo.push_back(newBufInfo);
2134 }
2135
2136 // Destroy the buffers in reverse order.
2137 while(!bufInfo.empty())
2138 {
2139 const BufferInfo& currBufInfo = bufInfo.back();
2140 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2141 bufInfo.pop_back();
2142 }
2143
2144 // Create buffers on both sides until we reach out of memory.
2145 prevOffsetLower = 0;
2146 prevOffsetUpper = poolCreateInfo.blockSize;
2147 res = VK_SUCCESS;
2148 for(size_t i = 0; res == VK_SUCCESS; ++i)
2149 {
2150 const bool upperAddress = (i % 2) != 0;
2151 if(upperAddress)
2152 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2153 else
2154 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2155 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2156 BufferInfo newBufInfo;
2157 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2158 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2159 if(res == VK_SUCCESS)
2160 {
2161 if(upperAddress)
2162 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002163 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002164 prevOffsetUpper = allocInfo.offset;
2165 }
2166 else
2167 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002168 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002169 prevOffsetLower = allocInfo.offset;
2170 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002171 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002172 bufInfo.push_back(newBufInfo);
2173 }
2174 }
2175
2176 // Destroy the buffers in random order.
2177 while(!bufInfo.empty())
2178 {
2179 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2180 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2181 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2182 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2183 }
2184
2185 // Create buffers on upper side only, constant size, until we reach out of memory.
2186 prevOffsetUpper = poolCreateInfo.blockSize;
2187 res = VK_SUCCESS;
2188 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2189 bufCreateInfo.size = bufSizeMax;
2190 for(size_t i = 0; res == VK_SUCCESS; ++i)
2191 {
2192 BufferInfo newBufInfo;
2193 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2194 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2195 if(res == VK_SUCCESS)
2196 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002197 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002198 prevOffsetUpper = allocInfo.offset;
2199 bufInfo.push_back(newBufInfo);
2200 }
2201 }
2202
2203 // Destroy the buffers in reverse order.
2204 while(!bufInfo.empty())
2205 {
2206 const BufferInfo& currBufInfo = bufInfo.back();
2207 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2208 bufInfo.pop_back();
2209 }
2210 }
2211
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002212 // Test ring buffer with lost allocations.
2213 {
2214 // Allocate number of buffers until pool is full.
2215 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2216 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2217 res = VK_SUCCESS;
2218 for(size_t i = 0; res == VK_SUCCESS; ++i)
2219 {
2220 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2221
2222 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2223
2224 BufferInfo newBufInfo;
2225 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2226 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2227 if(res == VK_SUCCESS)
2228 bufInfo.push_back(newBufInfo);
2229 }
2230
2231 // Free first half of it.
2232 {
2233 const size_t buffersToDelete = bufInfo.size() / 2;
2234 for(size_t i = 0; i < buffersToDelete; ++i)
2235 {
2236 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2237 }
2238 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2239 }
2240
2241 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002242 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002243 res = VK_SUCCESS;
2244 for(size_t i = 0; res == VK_SUCCESS; ++i)
2245 {
2246 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2247
2248 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2249
2250 BufferInfo newBufInfo;
2251 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2252 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2253 if(res == VK_SUCCESS)
2254 bufInfo.push_back(newBufInfo);
2255 }
2256
2257 VkDeviceSize firstNewOffset;
2258 {
2259 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2260
2261 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2262 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2263 bufCreateInfo.size = bufSizeMax;
2264
2265 BufferInfo newBufInfo;
2266 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2267 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002268 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002269 bufInfo.push_back(newBufInfo);
2270 firstNewOffset = allocInfo.offset;
2271
2272 // Make sure at least one buffer from the beginning became lost.
2273 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002274 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002275 }
2276
2277 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2278 size_t newCount = 1;
2279 for(;;)
2280 {
2281 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2282
2283 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2284
2285 BufferInfo newBufInfo;
2286 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2287 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002288 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002289 bufInfo.push_back(newBufInfo);
2290 ++newCount;
2291 if(allocInfo.offset < firstNewOffset)
2292 break;
2293 }
2294
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002295 // Delete buffers that are lost.
2296 for(size_t i = bufInfo.size(); i--; )
2297 {
2298 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2299 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2300 {
2301 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2302 bufInfo.erase(bufInfo.begin() + i);
2303 }
2304 }
2305
2306 // Test vmaMakePoolAllocationsLost
2307 {
2308 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2309
2310 size_t lostAllocCount = SIZE_MAX;
2311 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002312 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002313
2314 size_t realLostAllocCount = 0;
2315 for(size_t i = 0; i < bufInfo.size(); ++i)
2316 {
2317 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2318 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2319 ++realLostAllocCount;
2320 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002321 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002322 }
2323
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002324 // Destroy all the buffers in forward order.
2325 for(size_t i = 0; i < bufInfo.size(); ++i)
2326 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2327 bufInfo.clear();
2328 }
2329
Adam Sawicki70a683e2018-08-24 15:36:32 +02002330 vmaDestroyPool(g_hAllocator, pool);
2331}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002332
Adam Sawicki70a683e2018-08-24 15:36:32 +02002333static void TestLinearAllocatorMultiBlock()
2334{
2335 wprintf(L"Test linear allocator multi block\n");
2336
2337 RandomNumberGenerator rand{345673};
2338
2339 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2340 sampleBufCreateInfo.size = 1024 * 1024;
2341 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2342
2343 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2344 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2345
2346 VmaPoolCreateInfo poolCreateInfo = {};
2347 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2348 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002349 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002350
2351 VmaPool pool = nullptr;
2352 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002353 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002354
2355 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2356
2357 VmaAllocationCreateInfo allocCreateInfo = {};
2358 allocCreateInfo.pool = pool;
2359
2360 std::vector<BufferInfo> bufInfo;
2361 VmaAllocationInfo allocInfo;
2362
2363 // Test one-time free.
2364 {
2365 // Allocate buffers until we move to a second block.
2366 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2367 for(uint32_t i = 0; ; ++i)
2368 {
2369 BufferInfo newBufInfo;
2370 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2371 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002372 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002373 bufInfo.push_back(newBufInfo);
2374 if(lastMem && allocInfo.deviceMemory != lastMem)
2375 {
2376 break;
2377 }
2378 lastMem = allocInfo.deviceMemory;
2379 }
2380
Adam Sawickib8d34d52018-10-03 17:41:20 +02002381 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002382
2383 // Make sure that pool has now two blocks.
2384 VmaPoolStats poolStats = {};
2385 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002386 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002387
2388 // Destroy all the buffers in random order.
2389 while(!bufInfo.empty())
2390 {
2391 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2392 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2393 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2394 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2395 }
2396
2397 // Make sure that pool has now at most one block.
2398 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002399 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002400 }
2401
2402 // Test stack.
2403 {
2404 // Allocate buffers until we move to a second block.
2405 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2406 for(uint32_t i = 0; ; ++i)
2407 {
2408 BufferInfo newBufInfo;
2409 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2410 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002411 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002412 bufInfo.push_back(newBufInfo);
2413 if(lastMem && allocInfo.deviceMemory != lastMem)
2414 {
2415 break;
2416 }
2417 lastMem = allocInfo.deviceMemory;
2418 }
2419
Adam Sawickib8d34d52018-10-03 17:41:20 +02002420 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002421
2422 // Add few more buffers.
2423 for(uint32_t i = 0; i < 5; ++i)
2424 {
2425 BufferInfo newBufInfo;
2426 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2427 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002428 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002429 bufInfo.push_back(newBufInfo);
2430 }
2431
2432 // Make sure that pool has now two blocks.
2433 VmaPoolStats poolStats = {};
2434 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002435 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002436
2437 // Delete half of buffers, LIFO.
2438 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2439 {
2440 const BufferInfo& currBufInfo = bufInfo.back();
2441 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2442 bufInfo.pop_back();
2443 }
2444
2445 // Add one more buffer.
2446 BufferInfo newBufInfo;
2447 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2448 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002449 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002450 bufInfo.push_back(newBufInfo);
2451
2452 // Make sure that pool has now one block.
2453 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002454 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002455
2456 // Delete all the remaining buffers, LIFO.
2457 while(!bufInfo.empty())
2458 {
2459 const BufferInfo& currBufInfo = bufInfo.back();
2460 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2461 bufInfo.pop_back();
2462 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002463 }
2464
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002465 vmaDestroyPool(g_hAllocator, pool);
2466}
2467
Adam Sawickifd11d752018-08-22 15:02:10 +02002468static void ManuallyTestLinearAllocator()
2469{
2470 VmaStats origStats;
2471 vmaCalculateStats(g_hAllocator, &origStats);
2472
2473 wprintf(L"Manually test linear allocator\n");
2474
2475 RandomNumberGenerator rand{645332};
2476
2477 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2478 sampleBufCreateInfo.size = 1024; // Whatever.
2479 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2480
2481 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2482 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2483
2484 VmaPoolCreateInfo poolCreateInfo = {};
2485 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002486 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002487
2488 poolCreateInfo.blockSize = 10 * 1024;
2489 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2490 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2491
2492 VmaPool pool = nullptr;
2493 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002494 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002495
2496 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2497
2498 VmaAllocationCreateInfo allocCreateInfo = {};
2499 allocCreateInfo.pool = pool;
2500
2501 std::vector<BufferInfo> bufInfo;
2502 VmaAllocationInfo allocInfo;
2503 BufferInfo newBufInfo;
2504
2505 // Test double stack.
2506 {
2507 /*
2508 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2509 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2510
2511 Totally:
2512 1 block allocated
2513 10240 Vulkan bytes
2514 6 new allocations
2515 2256 bytes in allocations
2516 */
2517
2518 bufCreateInfo.size = 32;
2519 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2520 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002521 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002522 bufInfo.push_back(newBufInfo);
2523
2524 bufCreateInfo.size = 1024;
2525 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2526 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002527 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002528 bufInfo.push_back(newBufInfo);
2529
2530 bufCreateInfo.size = 32;
2531 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2532 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002533 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002534 bufInfo.push_back(newBufInfo);
2535
2536 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2537
2538 bufCreateInfo.size = 128;
2539 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2540 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002541 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002542 bufInfo.push_back(newBufInfo);
2543
2544 bufCreateInfo.size = 1024;
2545 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2546 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002547 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002548 bufInfo.push_back(newBufInfo);
2549
2550 bufCreateInfo.size = 16;
2551 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2552 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002553 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002554 bufInfo.push_back(newBufInfo);
2555
2556 VmaStats currStats;
2557 vmaCalculateStats(g_hAllocator, &currStats);
2558 VmaPoolStats poolStats;
2559 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2560
2561 char* statsStr = nullptr;
2562 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2563
2564 // PUT BREAKPOINT HERE TO CHECK.
2565 // Inspect: currStats versus origStats, poolStats, statsStr.
2566 int I = 0;
2567
2568 vmaFreeStatsString(g_hAllocator, statsStr);
2569
2570 // Destroy the buffers in reverse order.
2571 while(!bufInfo.empty())
2572 {
2573 const BufferInfo& currBufInfo = bufInfo.back();
2574 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2575 bufInfo.pop_back();
2576 }
2577 }
2578
2579 vmaDestroyPool(g_hAllocator, pool);
2580}
2581
// Benchmarks one pool configuration: `algorithm` is 0 (default),
// VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, or VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
// `empty` selects whether the pool is pre-fragmented before measuring;
// `allocStrategy` is a VMA_ALLOCATION_CREATE_STRATEGY_* flag (0 = default);
// `freeOrder` controls the order of deallocation. Prints results to stdout and,
// if `file` is not null, appends one CSV row to it.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    // Fixed seed so that every case measures an identical allocation pattern.
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block so the algorithm, not block management, is measured.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    // When !empty: pre-fragment the pool by filling 1/3 of it and randomly
    // freeing half of those allocations, leaving holes of varying sizes.
    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // memReq is reused from the sample buffer; only size varies per allocation.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations - only the vmaAllocateMemory loop is inside the timed region.
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reordering happens outside the timed region.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        // CSV row; columns must match the header written by BenchmarkAlgorithms.
        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
2731
Adam Sawicki80927152018-09-07 17:27:23 +02002732static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002733{
Adam Sawicki80927152018-09-07 17:27:23 +02002734 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002735
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002736 if(file)
2737 {
2738 fprintf(file,
2739 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002740 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002741 "Allocation time (s),Deallocation time (s)\n");
2742 }
2743
Adam Sawicki0a607132018-08-24 11:18:41 +02002744 uint32_t freeOrderCount = 1;
2745 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2746 freeOrderCount = 3;
2747 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2748 freeOrderCount = 2;
2749
2750 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002751 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002752
2753 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2754 {
2755 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2756 switch(freeOrderIndex)
2757 {
2758 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2759 case 1: freeOrder = FREE_ORDER::FORWARD; break;
2760 case 2: freeOrder = FREE_ORDER::RANDOM; break;
2761 default: assert(0);
2762 }
2763
2764 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
2765 {
Adam Sawicki80927152018-09-07 17:27:23 +02002766 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02002767 {
Adam Sawicki80927152018-09-07 17:27:23 +02002768 uint32_t algorithm = 0;
2769 switch(algorithmIndex)
2770 {
2771 case 0:
2772 break;
2773 case 1:
2774 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
2775 break;
2776 case 2:
2777 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2778 break;
2779 default:
2780 assert(0);
2781 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002782
Adam Sawicki80927152018-09-07 17:27:23 +02002783 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002784 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
2785 {
2786 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02002787 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002788 {
2789 switch(allocStrategyIndex)
2790 {
2791 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
2792 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
2793 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
2794 default: assert(0);
2795 }
2796 }
2797
Adam Sawicki80927152018-09-07 17:27:23 +02002798 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002799 file,
Adam Sawicki80927152018-09-07 17:27:23 +02002800 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002801 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002802 strategy,
2803 freeOrder); // freeOrder
2804 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002805 }
2806 }
2807 }
2808}
2809
Adam Sawickib8333fb2018-03-13 16:15:53 +01002810static void TestPool_SameSize()
2811{
2812 const VkDeviceSize BUF_SIZE = 1024 * 1024;
2813 const size_t BUF_COUNT = 100;
2814 VkResult res;
2815
2816 RandomNumberGenerator rand{123};
2817
2818 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2819 bufferInfo.size = BUF_SIZE;
2820 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2821
2822 uint32_t memoryTypeBits = UINT32_MAX;
2823 {
2824 VkBuffer dummyBuffer;
2825 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002826 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002827
2828 VkMemoryRequirements memReq;
2829 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2830 memoryTypeBits = memReq.memoryTypeBits;
2831
2832 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2833 }
2834
2835 VmaAllocationCreateInfo poolAllocInfo = {};
2836 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2837 uint32_t memTypeIndex;
2838 res = vmaFindMemoryTypeIndex(
2839 g_hAllocator,
2840 memoryTypeBits,
2841 &poolAllocInfo,
2842 &memTypeIndex);
2843
2844 VmaPoolCreateInfo poolCreateInfo = {};
2845 poolCreateInfo.memoryTypeIndex = memTypeIndex;
2846 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
2847 poolCreateInfo.minBlockCount = 1;
2848 poolCreateInfo.maxBlockCount = 4;
2849 poolCreateInfo.frameInUseCount = 0;
2850
2851 VmaPool pool;
2852 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002853 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002854
2855 vmaSetCurrentFrameIndex(g_hAllocator, 1);
2856
2857 VmaAllocationCreateInfo allocInfo = {};
2858 allocInfo.pool = pool;
2859 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
2860 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2861
2862 struct BufItem
2863 {
2864 VkBuffer Buf;
2865 VmaAllocation Alloc;
2866 };
2867 std::vector<BufItem> items;
2868
2869 // Fill entire pool.
2870 for(size_t i = 0; i < BUF_COUNT; ++i)
2871 {
2872 BufItem item;
2873 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002874 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002875 items.push_back(item);
2876 }
2877
2878 // Make sure that another allocation would fail.
2879 {
2880 BufItem item;
2881 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002882 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002883 }
2884
2885 // Validate that no buffer is lost. Also check that they are not mapped.
2886 for(size_t i = 0; i < items.size(); ++i)
2887 {
2888 VmaAllocationInfo allocInfo;
2889 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002890 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
2891 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002892 }
2893
2894 // Free some percent of random items.
2895 {
2896 const size_t PERCENT_TO_FREE = 10;
2897 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
2898 for(size_t i = 0; i < itemsToFree; ++i)
2899 {
2900 size_t index = (size_t)rand.Generate() % items.size();
2901 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2902 items.erase(items.begin() + index);
2903 }
2904 }
2905
2906 // Randomly allocate and free items.
2907 {
2908 const size_t OPERATION_COUNT = BUF_COUNT;
2909 for(size_t i = 0; i < OPERATION_COUNT; ++i)
2910 {
2911 bool allocate = rand.Generate() % 2 != 0;
2912 if(allocate)
2913 {
2914 if(items.size() < BUF_COUNT)
2915 {
2916 BufItem item;
2917 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002918 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002919 items.push_back(item);
2920 }
2921 }
2922 else // Free
2923 {
2924 if(!items.empty())
2925 {
2926 size_t index = (size_t)rand.Generate() % items.size();
2927 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
2928 items.erase(items.begin() + index);
2929 }
2930 }
2931 }
2932 }
2933
2934 // Allocate up to maximum.
2935 while(items.size() < BUF_COUNT)
2936 {
2937 BufItem item;
2938 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002939 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002940 items.push_back(item);
2941 }
2942
2943 // Validate that no buffer is lost.
2944 for(size_t i = 0; i < items.size(); ++i)
2945 {
2946 VmaAllocationInfo allocInfo;
2947 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002948 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002949 }
2950
2951 // Next frame.
2952 vmaSetCurrentFrameIndex(g_hAllocator, 2);
2953
2954 // Allocate another BUF_COUNT buffers.
2955 for(size_t i = 0; i < BUF_COUNT; ++i)
2956 {
2957 BufItem item;
2958 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002959 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002960 items.push_back(item);
2961 }
2962
2963 // Make sure the first BUF_COUNT is lost. Delete them.
2964 for(size_t i = 0; i < BUF_COUNT; ++i)
2965 {
2966 VmaAllocationInfo allocInfo;
2967 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002968 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002969 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2970 }
2971 items.erase(items.begin(), items.begin() + BUF_COUNT);
2972
2973 // Validate that no buffer is lost.
2974 for(size_t i = 0; i < items.size(); ++i)
2975 {
2976 VmaAllocationInfo allocInfo;
2977 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002978 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002979 }
2980
2981 // Free one item.
2982 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
2983 items.pop_back();
2984
2985 // Validate statistics.
2986 {
2987 VmaPoolStats poolStats = {};
2988 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002989 TEST(poolStats.allocationCount == items.size());
2990 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
2991 TEST(poolStats.unusedRangeCount == 1);
2992 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
2993 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002994 }
2995
2996 // Free all remaining items.
2997 for(size_t i = items.size(); i--; )
2998 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
2999 items.clear();
3000
3001 // Allocate maximum items again.
3002 for(size_t i = 0; i < BUF_COUNT; ++i)
3003 {
3004 BufItem item;
3005 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003006 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003007 items.push_back(item);
3008 }
3009
3010 // Delete every other item.
3011 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3012 {
3013 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3014 items.erase(items.begin() + i);
3015 }
3016
3017 // Defragment!
3018 {
3019 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3020 for(size_t i = 0; i < items.size(); ++i)
3021 allocationsToDefragment[i] = items[i].Alloc;
3022
3023 VmaDefragmentationStats defragmentationStats;
3024 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003025 TEST(res == VK_SUCCESS);
3026 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003027 }
3028
3029 // Free all remaining items.
3030 for(size_t i = items.size(); i--; )
3031 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3032 items.clear();
3033
3034 ////////////////////////////////////////////////////////////////////////////////
3035 // Test for vmaMakePoolAllocationsLost
3036
3037 // Allocate 4 buffers on frame 10.
3038 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3039 for(size_t i = 0; i < 4; ++i)
3040 {
3041 BufItem item;
3042 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003043 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003044 items.push_back(item);
3045 }
3046
3047 // Touch first 2 of them on frame 11.
3048 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3049 for(size_t i = 0; i < 2; ++i)
3050 {
3051 VmaAllocationInfo allocInfo;
3052 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3053 }
3054
3055 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3056 size_t lostCount = 0xDEADC0DE;
3057 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003058 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003059
3060 // Make another call. Now 0 should be lost.
3061 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003062 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003063
3064 // Make another call, with null count. Should not crash.
3065 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3066
3067 // END: Free all remaining items.
3068 for(size_t i = items.size(); i--; )
3069 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3070
3071 items.clear();
3072
Adam Sawickid2924172018-06-11 12:48:46 +02003073 ////////////////////////////////////////////////////////////////////////////////
3074 // Test for allocation too large for pool
3075
3076 {
3077 VmaAllocationCreateInfo allocCreateInfo = {};
3078 allocCreateInfo.pool = pool;
3079
3080 VkMemoryRequirements memReq;
3081 memReq.memoryTypeBits = UINT32_MAX;
3082 memReq.alignment = 1;
3083 memReq.size = poolCreateInfo.blockSize + 4;
3084
3085 VmaAllocation alloc = nullptr;
3086 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003087 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003088 }
3089
Adam Sawickib8333fb2018-03-13 16:15:53 +01003090 vmaDestroyPool(g_hAllocator, pool);
3091}
3092
// Returns true if every one of the `size` bytes starting at pMemory equals
// `pattern`. An empty range (size == 0) is trivially valid.
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* const bytesBeg = static_cast<const uint8_t*>(pMemory);
    const uint8_t* const bytesEnd = bytesBeg + size;
    return std::all_of(bytesBeg, bytesEnd,
        [pattern](uint8_t currByte) { return currByte == pattern; });
}
3105
3106static void TestAllocationsInitialization()
3107{
3108 VkResult res;
3109
3110 const size_t BUF_SIZE = 1024;
3111
3112 // Create pool.
3113
3114 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3115 bufInfo.size = BUF_SIZE;
3116 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3117
3118 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3119 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3120
3121 VmaPoolCreateInfo poolCreateInfo = {};
3122 poolCreateInfo.blockSize = BUF_SIZE * 10;
3123 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3124 poolCreateInfo.maxBlockCount = 1;
3125 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003126 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003127
3128 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3129 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003130 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003131
3132 // Create one persistently mapped buffer to keep memory of this block mapped,
3133 // so that pointer to mapped data will remain (more or less...) valid even
3134 // after destruction of other allocations.
3135
3136 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3137 VkBuffer firstBuf;
3138 VmaAllocation firstAlloc;
3139 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003141
3142 // Test buffers.
3143
3144 for(uint32_t i = 0; i < 2; ++i)
3145 {
3146 const bool persistentlyMapped = i == 0;
3147 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3148 VkBuffer buf;
3149 VmaAllocation alloc;
3150 VmaAllocationInfo allocInfo;
3151 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003152 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003153
3154 void* pMappedData;
3155 if(!persistentlyMapped)
3156 {
3157 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003158 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003159 }
3160 else
3161 {
3162 pMappedData = allocInfo.pMappedData;
3163 }
3164
3165 // Validate initialized content
3166 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003167 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003168
3169 if(!persistentlyMapped)
3170 {
3171 vmaUnmapMemory(g_hAllocator, alloc);
3172 }
3173
3174 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3175
3176 // Validate freed content
3177 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003178 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003179 }
3180
3181 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3182 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3183}
3184
Adam Sawickib8333fb2018-03-13 16:15:53 +01003185static void TestPool_Benchmark(
3186 PoolTestResult& outResult,
3187 const PoolTestConfig& config)
3188{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003189 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003190
3191 RandomNumberGenerator mainRand{config.RandSeed};
3192
3193 uint32_t allocationSizeProbabilitySum = std::accumulate(
3194 config.AllocationSizes.begin(),
3195 config.AllocationSizes.end(),
3196 0u,
3197 [](uint32_t sum, const AllocationSize& allocSize) {
3198 return sum + allocSize.Probability;
3199 });
3200
3201 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3202 bufferInfo.size = 256; // Whatever.
3203 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3204
3205 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3206 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3207 imageInfo.extent.width = 256; // Whatever.
3208 imageInfo.extent.height = 256; // Whatever.
3209 imageInfo.extent.depth = 1;
3210 imageInfo.mipLevels = 1;
3211 imageInfo.arrayLayers = 1;
3212 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3213 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3214 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3215 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3216 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3217
3218 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3219 {
3220 VkBuffer dummyBuffer;
3221 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003222 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003223
3224 VkMemoryRequirements memReq;
3225 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3226 bufferMemoryTypeBits = memReq.memoryTypeBits;
3227
3228 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3229 }
3230
3231 uint32_t imageMemoryTypeBits = UINT32_MAX;
3232 {
3233 VkImage dummyImage;
3234 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003235 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003236
3237 VkMemoryRequirements memReq;
3238 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3239 imageMemoryTypeBits = memReq.memoryTypeBits;
3240
3241 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3242 }
3243
3244 uint32_t memoryTypeBits = 0;
3245 if(config.UsesBuffers() && config.UsesImages())
3246 {
3247 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3248 if(memoryTypeBits == 0)
3249 {
3250 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3251 return;
3252 }
3253 }
3254 else if(config.UsesBuffers())
3255 memoryTypeBits = bufferMemoryTypeBits;
3256 else if(config.UsesImages())
3257 memoryTypeBits = imageMemoryTypeBits;
3258 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003259 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003260
3261 VmaPoolCreateInfo poolCreateInfo = {};
3262 poolCreateInfo.memoryTypeIndex = 0;
3263 poolCreateInfo.minBlockCount = 1;
3264 poolCreateInfo.maxBlockCount = 1;
3265 poolCreateInfo.blockSize = config.PoolSize;
3266 poolCreateInfo.frameInUseCount = 1;
3267
3268 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3269 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3270 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3271
3272 VmaPool pool;
3273 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003274 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003275
3276 // Start time measurement - after creating pool and initializing data structures.
3277 time_point timeBeg = std::chrono::high_resolution_clock::now();
3278
3279 ////////////////////////////////////////////////////////////////////////////////
3280 // ThreadProc
3281 auto ThreadProc = [&](
3282 PoolTestThreadResult* outThreadResult,
3283 uint32_t randSeed,
3284 HANDLE frameStartEvent,
3285 HANDLE frameEndEvent) -> void
3286 {
3287 RandomNumberGenerator threadRand{randSeed};
3288
3289 outThreadResult->AllocationTimeMin = duration::max();
3290 outThreadResult->AllocationTimeSum = duration::zero();
3291 outThreadResult->AllocationTimeMax = duration::min();
3292 outThreadResult->DeallocationTimeMin = duration::max();
3293 outThreadResult->DeallocationTimeSum = duration::zero();
3294 outThreadResult->DeallocationTimeMax = duration::min();
3295 outThreadResult->AllocationCount = 0;
3296 outThreadResult->DeallocationCount = 0;
3297 outThreadResult->LostAllocationCount = 0;
3298 outThreadResult->LostAllocationTotalSize = 0;
3299 outThreadResult->FailedAllocationCount = 0;
3300 outThreadResult->FailedAllocationTotalSize = 0;
3301
3302 struct Item
3303 {
3304 VkDeviceSize BufferSize;
3305 VkExtent2D ImageSize;
3306 VkBuffer Buf;
3307 VkImage Image;
3308 VmaAllocation Alloc;
3309
3310 VkDeviceSize CalcSizeBytes() const
3311 {
3312 return BufferSize +
3313 ImageSize.width * ImageSize.height * 4;
3314 }
3315 };
3316 std::vector<Item> unusedItems, usedItems;
3317
3318 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3319
3320 // Create all items - all unused, not yet allocated.
3321 for(size_t i = 0; i < threadTotalItemCount; ++i)
3322 {
3323 Item item = {};
3324
3325 uint32_t allocSizeIndex = 0;
3326 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3327 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3328 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3329
3330 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3331 if(allocSize.BufferSizeMax > 0)
3332 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003333 TEST(allocSize.BufferSizeMin > 0);
3334 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003335 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3336 item.BufferSize = allocSize.BufferSizeMin;
3337 else
3338 {
3339 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3340 item.BufferSize = item.BufferSize / 16 * 16;
3341 }
3342 }
3343 else
3344 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003345 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003346 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3347 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3348 else
3349 {
3350 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3351 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3352 }
3353 }
3354
3355 unusedItems.push_back(item);
3356 }
3357
3358 auto Allocate = [&](Item& item) -> VkResult
3359 {
3360 VmaAllocationCreateInfo allocCreateInfo = {};
3361 allocCreateInfo.pool = pool;
3362 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3363 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3364
3365 if(item.BufferSize)
3366 {
3367 bufferInfo.size = item.BufferSize;
3368 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3369 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3370 }
3371 else
3372 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003373 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003374
3375 imageInfo.extent.width = item.ImageSize.width;
3376 imageInfo.extent.height = item.ImageSize.height;
3377 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3378 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3379 }
3380 };
3381
3382 ////////////////////////////////////////////////////////////////////////////////
3383 // Frames
3384 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3385 {
3386 WaitForSingleObject(frameStartEvent, INFINITE);
3387
3388 // Always make some percent of used bufs unused, to choose different used ones.
3389 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3390 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3391 {
3392 size_t index = threadRand.Generate() % usedItems.size();
3393 unusedItems.push_back(usedItems[index]);
3394 usedItems.erase(usedItems.begin() + index);
3395 }
3396
3397 // Determine which bufs we want to use in this frame.
3398 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3399 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003400 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003401 // Move some used to unused.
3402 while(usedBufCount < usedItems.size())
3403 {
3404 size_t index = threadRand.Generate() % usedItems.size();
3405 unusedItems.push_back(usedItems[index]);
3406 usedItems.erase(usedItems.begin() + index);
3407 }
3408 // Move some unused to used.
3409 while(usedBufCount > usedItems.size())
3410 {
3411 size_t index = threadRand.Generate() % unusedItems.size();
3412 usedItems.push_back(unusedItems[index]);
3413 unusedItems.erase(unusedItems.begin() + index);
3414 }
3415
3416 uint32_t touchExistingCount = 0;
3417 uint32_t touchLostCount = 0;
3418 uint32_t createSucceededCount = 0;
3419 uint32_t createFailedCount = 0;
3420
3421 // Touch all used bufs. If not created or lost, allocate.
3422 for(size_t i = 0; i < usedItems.size(); ++i)
3423 {
3424 Item& item = usedItems[i];
3425 // Not yet created.
3426 if(item.Alloc == VK_NULL_HANDLE)
3427 {
3428 res = Allocate(item);
3429 ++outThreadResult->AllocationCount;
3430 if(res != VK_SUCCESS)
3431 {
3432 item.Alloc = VK_NULL_HANDLE;
3433 item.Buf = VK_NULL_HANDLE;
3434 ++outThreadResult->FailedAllocationCount;
3435 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3436 ++createFailedCount;
3437 }
3438 else
3439 ++createSucceededCount;
3440 }
3441 else
3442 {
3443 // Touch.
3444 VmaAllocationInfo allocInfo;
3445 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3446 // Lost.
3447 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3448 {
3449 ++touchLostCount;
3450
3451 // Destroy.
3452 {
3453 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3454 if(item.Buf)
3455 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3456 else
3457 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3458 ++outThreadResult->DeallocationCount;
3459 }
3460 item.Alloc = VK_NULL_HANDLE;
3461 item.Buf = VK_NULL_HANDLE;
3462
3463 ++outThreadResult->LostAllocationCount;
3464 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3465
3466 // Recreate.
3467 res = Allocate(item);
3468 ++outThreadResult->AllocationCount;
3469 // Creation failed.
3470 if(res != VK_SUCCESS)
3471 {
3472 ++outThreadResult->FailedAllocationCount;
3473 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3474 ++createFailedCount;
3475 }
3476 else
3477 ++createSucceededCount;
3478 }
3479 else
3480 ++touchExistingCount;
3481 }
3482 }
3483
3484 /*
3485 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3486 randSeed, frameIndex,
3487 touchExistingCount, touchLostCount,
3488 createSucceededCount, createFailedCount);
3489 */
3490
3491 SetEvent(frameEndEvent);
3492 }
3493
3494 // Free all remaining items.
3495 for(size_t i = usedItems.size(); i--; )
3496 {
3497 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3498 if(usedItems[i].Buf)
3499 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3500 else
3501 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3502 ++outThreadResult->DeallocationCount;
3503 }
3504 for(size_t i = unusedItems.size(); i--; )
3505 {
3506 PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
3507 if(unusedItems[i].Buf)
3508 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3509 else
3510 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3511 ++outThreadResult->DeallocationCount;
3512 }
3513 };
3514
3515 // Launch threads.
3516 uint32_t threadRandSeed = mainRand.Generate();
3517 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3518 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3519 std::vector<std::thread> bkgThreads;
3520 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3521 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3522 {
3523 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3524 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3525 bkgThreads.emplace_back(std::bind(
3526 ThreadProc,
3527 &threadResults[threadIndex],
3528 threadRandSeed + threadIndex,
3529 frameStartEvents[threadIndex],
3530 frameEndEvents[threadIndex]));
3531 }
3532
3533 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003534 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003535 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3536 {
3537 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3538 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3539 SetEvent(frameStartEvents[threadIndex]);
3540 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3541 }
3542
3543 // Wait for threads finished
3544 for(size_t i = 0; i < bkgThreads.size(); ++i)
3545 {
3546 bkgThreads[i].join();
3547 CloseHandle(frameEndEvents[i]);
3548 CloseHandle(frameStartEvents[i]);
3549 }
3550 bkgThreads.clear();
3551
3552 // Finish time measurement - before destroying pool.
3553 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3554
3555 vmaDestroyPool(g_hAllocator, pool);
3556
3557 outResult.AllocationTimeMin = duration::max();
3558 outResult.AllocationTimeAvg = duration::zero();
3559 outResult.AllocationTimeMax = duration::min();
3560 outResult.DeallocationTimeMin = duration::max();
3561 outResult.DeallocationTimeAvg = duration::zero();
3562 outResult.DeallocationTimeMax = duration::min();
3563 outResult.LostAllocationCount = 0;
3564 outResult.LostAllocationTotalSize = 0;
3565 outResult.FailedAllocationCount = 0;
3566 outResult.FailedAllocationTotalSize = 0;
3567 size_t allocationCount = 0;
3568 size_t deallocationCount = 0;
3569 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3570 {
3571 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3572 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3573 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3574 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3575 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3576 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3577 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3578 allocationCount += threadResult.AllocationCount;
3579 deallocationCount += threadResult.DeallocationCount;
3580 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3581 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3582 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3583 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3584 }
3585 if(allocationCount)
3586 outResult.AllocationTimeAvg /= allocationCount;
3587 if(deallocationCount)
3588 outResult.DeallocationTimeAvg /= deallocationCount;
3589}
3590
// Returns true if the half-open host address ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) share at least one byte. Two ranges starting at the
// same address are always reported as overlapping (even when empty), which
// is the convention the mapping tests rely on.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    return ptr1 < ptr2 ?
        (ptr1 + size1 > ptr2) :
        (ptr2 + size2 > ptr1);
}
3600
// Tests vmaMapMemory/vmaUnmapMemory reference counting and the
// VMA_ALLOCATION_CREATE_MAPPED_BIT (persistently mapped) behavior, in three
// variants: default memory (TEST_NORMAL), a custom pool (TEST_POOL), and
// dedicated allocations (TEST_DEDICATED).
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type of the created buffers. Captured during the first
    // iteration and then used to create the custom pool in the TEST_POOL
    // iteration - relies on TEST_NORMAL running before TEST_POOL.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        // Custom pool is created only for the TEST_POOL variant; otherwise
        // pool stays null and is passed as such in allocCreateInfo.
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000; // 64 KB - also the extent touched through mapped pointers below.
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers. The third array slot is filled later with the
        // persistently mapped buffer.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so must not be mapped yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte of the mapping to catch a bad range.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time. Mapping the same allocation again must
        // return the same pointer (mapping is reference-counted).
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1. Its mapping must not overlap buffer 0's mapping.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times - must balance the two maps above before
        // pMappedData becomes null again.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2. Must return the already-existing persistent mapping.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2. The persistent mapping must survive the unmap.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
3705
// Multithreaded stress test of vmaCreateBuffer / vmaMapMemory /
// vmaUnmapMemory / vmaDestroyBuffer: 16 threads each create 64 buffers,
// cycling every buffer through a randomly chosen mapping mode, with random
// sleeps in between to provoke races inside the allocator. Runs the same
// three variants as TestMapping: default memory, custom pool, dedicated.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Memory type captured by whichever thread allocates first; needed to
    // create the custom pool in the TEST_POOL iteration.
    // NOTE(review): written by multiple threads guarded only by `volatile`,
    // which is not a synchronization primitive in C++ - formally a data
    // race; consider std::atomic<uint32_t>.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Create infos are captured by value; memTypeIndex is shared by
            // reference so the first thread to allocate can publish it.
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Deterministic per-thread sequence, seeded with the thread index.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Second map of the same allocation must return the same pointer.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        // MAP_FOR_MOMENT was mapped once, so it is fully unmapped now;
                        // MAP_TWO_TIMES still holds one outstanding map.
                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Randomly yield or sleep to vary thread interleaving.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Release remaining maps and destroy all buffers, in reverse order.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
3868
// Writes the CSV column header row for main-test results to `file`.
// Must stay in sync with the row format produced by WriteMainTestResult.
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    // The header is a fixed literal - no formatting needed.
    fputs(HEADER, file);
}
3885
3886static void WriteMainTestResult(
3887 FILE* file,
3888 const char* codeDescription,
3889 const char* testDescription,
3890 const Config& config, const Result& result)
3891{
3892 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3893 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3894 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3895 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3896 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3897 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3898 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3899
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003900 std::string currTime;
3901 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003902
3903 fprintf(file,
3904 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01003905 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
3906 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003907 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02003908 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01003909 totalTimeSeconds * 1e6f,
3910 allocationTimeMinSeconds * 1e6f,
3911 allocationTimeAvgSeconds * 1e6f,
3912 allocationTimeMaxSeconds * 1e6f,
3913 deallocationTimeMinSeconds * 1e6f,
3914 deallocationTimeAvgSeconds * 1e6f,
3915 deallocationTimeMaxSeconds * 1e6f,
3916 result.TotalMemoryAllocated,
3917 result.FreeRangeSizeAvg,
3918 result.FreeRangeSizeMax);
3919}
3920
// Writes the CSV column header row for pool-test results to `file`.
// Must stay in sync with the row format produced by WritePoolTestResult.
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    // The header is a fixed literal - no formatting needed.
    fputs(HEADER, file);
}
3938
3939static void WritePoolTestResult(
3940 FILE* file,
3941 const char* codeDescription,
3942 const char* testDescription,
3943 const PoolTestConfig& config,
3944 const PoolTestResult& result)
3945{
3946 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
3947 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
3948 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
3949 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
3950 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
3951 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
3952 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
3953
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003954 std::string currTime;
3955 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003956
3957 fprintf(file,
3958 "%s,%s,%s,"
3959 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
3960 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
3961 // General
3962 codeDescription,
3963 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003964 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01003965 // Config
3966 config.ThreadCount,
3967 (unsigned long long)config.PoolSize,
3968 config.FrameCount,
3969 config.TotalItemCount,
3970 config.UsedItemCountMin,
3971 config.UsedItemCountMax,
3972 config.ItemsToMakeUnusedPercent,
3973 // Results
3974 totalTimeSeconds * 1e6f,
3975 allocationTimeMinSeconds * 1e6f,
3976 allocationTimeAvgSeconds * 1e6f,
3977 allocationTimeMaxSeconds * 1e6f,
3978 deallocationTimeMinSeconds * 1e6f,
3979 deallocationTimeAvgSeconds * 1e6f,
3980 deallocationTimeMaxSeconds * 1e6f,
3981 result.LostAllocationCount,
3982 result.LostAllocationTotalSize,
3983 result.FailedAllocationCount,
3984 result.FailedAllocationTotalSize);
3985}
3986
3987static void PerformCustomMainTest(FILE* file)
3988{
3989 Config config{};
3990 config.RandSeed = 65735476;
3991 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
3992 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
3993 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
3994 config.FreeOrder = FREE_ORDER::FORWARD;
3995 config.ThreadCount = 16;
3996 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02003997 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01003998
3999 // Buffers
4000 //config.AllocationSizes.push_back({4, 16, 1024});
4001 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4002
4003 // Images
4004 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4005 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4006
4007 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4008 config.AdditionalOperationCount = 1024;
4009
4010 Result result{};
4011 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004012 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004013 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4014}
4015
4016static void PerformCustomPoolTest(FILE* file)
4017{
4018 PoolTestConfig config;
4019 config.PoolSize = 100 * 1024 * 1024;
4020 config.RandSeed = 2345764;
4021 config.ThreadCount = 1;
4022 config.FrameCount = 200;
4023 config.ItemsToMakeUnusedPercent = 2;
4024
4025 AllocationSize allocSize = {};
4026 allocSize.BufferSizeMin = 1024;
4027 allocSize.BufferSizeMax = 1024 * 1024;
4028 allocSize.Probability = 1;
4029 config.AllocationSizes.push_back(allocSize);
4030
4031 allocSize.BufferSizeMin = 0;
4032 allocSize.BufferSizeMax = 0;
4033 allocSize.ImageSizeMin = 128;
4034 allocSize.ImageSizeMax = 1024;
4035 allocSize.Probability = 1;
4036 config.AllocationSizes.push_back(allocSize);
4037
4038 config.PoolSize = config.CalcAvgResourceSize() * 200;
4039 config.UsedItemCountMax = 160;
4040 config.TotalItemCount = config.UsedItemCountMax * 10;
4041 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4042
4043 g_MemoryAliasingWarningEnabled = false;
4044 PoolTestResult result = {};
4045 TestPool_Benchmark(result, config);
4046 g_MemoryAliasingWarningEnabled = true;
4047
4048 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4049}
4050
Adam Sawickib8333fb2018-03-13 16:15:53 +01004051static void PerformMainTests(FILE* file)
4052{
4053 uint32_t repeatCount = 1;
4054 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4055
4056 Config config{};
4057 config.RandSeed = 65735476;
4058 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4059 config.FreeOrder = FREE_ORDER::FORWARD;
4060
4061 size_t threadCountCount = 1;
4062 switch(ConfigType)
4063 {
4064 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4065 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4066 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4067 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4068 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4069 default: assert(0);
4070 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004071
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004072 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004073
Adam Sawickib8333fb2018-03-13 16:15:53 +01004074 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4075 {
4076 std::string desc1;
4077
4078 switch(threadCountIndex)
4079 {
4080 case 0:
4081 desc1 += "1_thread";
4082 config.ThreadCount = 1;
4083 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4084 break;
4085 case 1:
4086 desc1 += "16_threads+0%_common";
4087 config.ThreadCount = 16;
4088 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4089 break;
4090 case 2:
4091 desc1 += "16_threads+50%_common";
4092 config.ThreadCount = 16;
4093 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4094 break;
4095 case 3:
4096 desc1 += "16_threads+100%_common";
4097 config.ThreadCount = 16;
4098 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4099 break;
4100 case 4:
4101 desc1 += "2_threads+0%_common";
4102 config.ThreadCount = 2;
4103 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4104 break;
4105 case 5:
4106 desc1 += "2_threads+50%_common";
4107 config.ThreadCount = 2;
4108 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4109 break;
4110 case 6:
4111 desc1 += "2_threads+100%_common";
4112 config.ThreadCount = 2;
4113 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4114 break;
4115 default:
4116 assert(0);
4117 }
4118
4119 // 0 = buffers, 1 = images, 2 = buffers and images
4120 size_t buffersVsImagesCount = 2;
4121 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4122 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4123 {
4124 std::string desc2 = desc1;
4125 switch(buffersVsImagesIndex)
4126 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004127 case 0: desc2 += ",Buffers"; break;
4128 case 1: desc2 += ",Images"; break;
4129 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004130 default: assert(0);
4131 }
4132
4133 // 0 = small, 1 = large, 2 = small and large
4134 size_t smallVsLargeCount = 2;
4135 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4136 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4137 {
4138 std::string desc3 = desc2;
4139 switch(smallVsLargeIndex)
4140 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004141 case 0: desc3 += ",Small"; break;
4142 case 1: desc3 += ",Large"; break;
4143 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004144 default: assert(0);
4145 }
4146
4147 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4148 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4149 else
4150 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4151
4152 // 0 = varying sizes min...max, 1 = set of constant sizes
4153 size_t constantSizesCount = 1;
4154 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4155 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4156 {
4157 std::string desc4 = desc3;
4158 switch(constantSizesIndex)
4159 {
4160 case 0: desc4 += " Varying_sizes"; break;
4161 case 1: desc4 += " Constant_sizes"; break;
4162 default: assert(0);
4163 }
4164
4165 config.AllocationSizes.clear();
4166 // Buffers present
4167 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4168 {
4169 // Small
4170 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4171 {
4172 // Varying size
4173 if(constantSizesIndex == 0)
4174 config.AllocationSizes.push_back({4, 16, 1024});
4175 // Constant sizes
4176 else
4177 {
4178 config.AllocationSizes.push_back({1, 16, 16});
4179 config.AllocationSizes.push_back({1, 64, 64});
4180 config.AllocationSizes.push_back({1, 256, 256});
4181 config.AllocationSizes.push_back({1, 1024, 1024});
4182 }
4183 }
4184 // Large
4185 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4186 {
4187 // Varying size
4188 if(constantSizesIndex == 0)
4189 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4190 // Constant sizes
4191 else
4192 {
4193 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4194 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4195 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4196 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4197 }
4198 }
4199 }
4200 // Images present
4201 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4202 {
4203 // Small
4204 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4205 {
4206 // Varying size
4207 if(constantSizesIndex == 0)
4208 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4209 // Constant sizes
4210 else
4211 {
4212 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4213 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4214 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4215 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4216 }
4217 }
4218 // Large
4219 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4220 {
4221 // Varying size
4222 if(constantSizesIndex == 0)
4223 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4224 // Constant sizes
4225 else
4226 {
4227 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4228 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4229 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4230 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4231 }
4232 }
4233 }
4234
4235 // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
4236 size_t beginBytesToAllocateCount = 1;
4237 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4238 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4239 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4240 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4241 {
4242 std::string desc5 = desc4;
4243
4244 switch(beginBytesToAllocateIndex)
4245 {
4246 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004247 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004248 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4249 config.AdditionalOperationCount = 0;
4250 break;
4251 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004252 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004253 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4254 config.AdditionalOperationCount = 1024;
4255 break;
4256 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004257 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004258 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4259 config.AdditionalOperationCount = 1024;
4260 break;
4261 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004262 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004263 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4264 config.AdditionalOperationCount = 1024;
4265 break;
4266 default:
4267 assert(0);
4268 }
4269
Adam Sawicki0667e332018-08-24 17:26:44 +02004270 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004271 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004272 std::string desc6 = desc5;
4273 switch(strategyIndex)
4274 {
4275 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004276 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004277 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4278 break;
4279 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004280 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004281 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4282 break;
4283 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004284 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004285 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4286 break;
4287 default:
4288 assert(0);
4289 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004290
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004291 desc6 += ',';
4292 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004293
4294 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004295
4296 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4297 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004298 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004299
4300 Result result{};
4301 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004302 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004303 if(file)
4304 {
4305 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4306 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004307 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004308 }
4309 }
4310 }
4311 }
4312 }
4313 }
4314}
4315
4316static void PerformPoolTests(FILE* file)
4317{
4318 const size_t AVG_RESOURCES_PER_POOL = 300;
4319
4320 uint32_t repeatCount = 1;
4321 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4322
4323 PoolTestConfig config{};
4324 config.RandSeed = 2346343;
4325 config.FrameCount = 200;
4326 config.ItemsToMakeUnusedPercent = 2;
4327
4328 size_t threadCountCount = 1;
4329 switch(ConfigType)
4330 {
4331 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4332 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4333 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4334 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4335 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4336 default: assert(0);
4337 }
4338 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4339 {
4340 std::string desc1;
4341
4342 switch(threadCountIndex)
4343 {
4344 case 0:
4345 desc1 += "1_thread";
4346 config.ThreadCount = 1;
4347 break;
4348 case 1:
4349 desc1 += "16_threads";
4350 config.ThreadCount = 16;
4351 break;
4352 case 2:
4353 desc1 += "2_threads";
4354 config.ThreadCount = 2;
4355 break;
4356 default:
4357 assert(0);
4358 }
4359
4360 // 0 = buffers, 1 = images, 2 = buffers and images
4361 size_t buffersVsImagesCount = 2;
4362 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4363 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4364 {
4365 std::string desc2 = desc1;
4366 switch(buffersVsImagesIndex)
4367 {
4368 case 0: desc2 += " Buffers"; break;
4369 case 1: desc2 += " Images"; break;
4370 case 2: desc2 += " Buffers+Images"; break;
4371 default: assert(0);
4372 }
4373
4374 // 0 = small, 1 = large, 2 = small and large
4375 size_t smallVsLargeCount = 2;
4376 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4377 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4378 {
4379 std::string desc3 = desc2;
4380 switch(smallVsLargeIndex)
4381 {
4382 case 0: desc3 += " Small"; break;
4383 case 1: desc3 += " Large"; break;
4384 case 2: desc3 += " Small+Large"; break;
4385 default: assert(0);
4386 }
4387
4388 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4389 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4390 else
4391 config.PoolSize = 4ull * 1024 * 1024;
4392
4393 // 0 = varying sizes min...max, 1 = set of constant sizes
4394 size_t constantSizesCount = 1;
4395 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4396 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4397 {
4398 std::string desc4 = desc3;
4399 switch(constantSizesIndex)
4400 {
4401 case 0: desc4 += " Varying_sizes"; break;
4402 case 1: desc4 += " Constant_sizes"; break;
4403 default: assert(0);
4404 }
4405
4406 config.AllocationSizes.clear();
4407 // Buffers present
4408 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4409 {
4410 // Small
4411 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4412 {
4413 // Varying size
4414 if(constantSizesIndex == 0)
4415 config.AllocationSizes.push_back({4, 16, 1024});
4416 // Constant sizes
4417 else
4418 {
4419 config.AllocationSizes.push_back({1, 16, 16});
4420 config.AllocationSizes.push_back({1, 64, 64});
4421 config.AllocationSizes.push_back({1, 256, 256});
4422 config.AllocationSizes.push_back({1, 1024, 1024});
4423 }
4424 }
4425 // Large
4426 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4427 {
4428 // Varying size
4429 if(constantSizesIndex == 0)
4430 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4431 // Constant sizes
4432 else
4433 {
4434 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4435 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4436 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4437 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4438 }
4439 }
4440 }
4441 // Images present
4442 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4443 {
4444 // Small
4445 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4446 {
4447 // Varying size
4448 if(constantSizesIndex == 0)
4449 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4450 // Constant sizes
4451 else
4452 {
4453 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4454 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4455 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4456 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4457 }
4458 }
4459 // Large
4460 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4461 {
4462 // Varying size
4463 if(constantSizesIndex == 0)
4464 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4465 // Constant sizes
4466 else
4467 {
4468 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4469 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4470 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4471 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4472 }
4473 }
4474 }
4475
4476 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4477 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4478
4479 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4480 size_t subscriptionModeCount;
4481 switch(ConfigType)
4482 {
4483 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4484 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4485 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4486 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4487 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4488 default: assert(0);
4489 }
4490 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4491 {
4492 std::string desc5 = desc4;
4493
4494 switch(subscriptionModeIndex)
4495 {
4496 case 0:
4497 desc5 += " Subscription_66%";
4498 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4499 break;
4500 case 1:
4501 desc5 += " Subscription_133%";
4502 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4503 break;
4504 case 2:
4505 desc5 += " Subscription_100%";
4506 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4507 break;
4508 case 3:
4509 desc5 += " Subscription_33%";
4510 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4511 break;
4512 case 4:
4513 desc5 += " Subscription_166%";
4514 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4515 break;
4516 default:
4517 assert(0);
4518 }
4519
4520 config.TotalItemCount = config.UsedItemCountMax * 5;
4521 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4522
4523 const char* testDescription = desc5.c_str();
4524
4525 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4526 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004527 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004528
4529 PoolTestResult result{};
4530 g_MemoryAliasingWarningEnabled = false;
4531 TestPool_Benchmark(result, config);
4532 g_MemoryAliasingWarningEnabled = true;
4533 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4534 }
4535 }
4536 }
4537 }
4538 }
4539 }
4540}
4541
Adam Sawickia83793a2018-09-03 13:40:42 +02004542static void BasicTestBuddyAllocator()
4543{
4544 wprintf(L"Basic test buddy allocator\n");
4545
4546 RandomNumberGenerator rand{76543};
4547
4548 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4549 sampleBufCreateInfo.size = 1024; // Whatever.
4550 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4551
4552 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4553 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4554
4555 VmaPoolCreateInfo poolCreateInfo = {};
4556 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004557 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004558
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004559 // Deliberately adding 1023 to test usable size smaller than memory block size.
4560 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004561 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004562 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004563
4564 VmaPool pool = nullptr;
4565 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004566 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004567
4568 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4569
4570 VmaAllocationCreateInfo allocCreateInfo = {};
4571 allocCreateInfo.pool = pool;
4572
4573 std::vector<BufferInfo> bufInfo;
4574 BufferInfo newBufInfo;
4575 VmaAllocationInfo allocInfo;
4576
4577 bufCreateInfo.size = 1024 * 256;
4578 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4579 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004580 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004581 bufInfo.push_back(newBufInfo);
4582
4583 bufCreateInfo.size = 1024 * 512;
4584 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4585 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004586 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004587 bufInfo.push_back(newBufInfo);
4588
4589 bufCreateInfo.size = 1024 * 128;
4590 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4591 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004592 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004593 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004594
4595 // Test very small allocation, smaller than minimum node size.
4596 bufCreateInfo.size = 1;
4597 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4598 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004599 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004600 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004601
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004602 // Test some small allocation with alignment requirement.
4603 {
4604 VkMemoryRequirements memReq;
4605 memReq.alignment = 256;
4606 memReq.memoryTypeBits = UINT32_MAX;
4607 memReq.size = 32;
4608
4609 newBufInfo.Buffer = VK_NULL_HANDLE;
4610 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4611 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004612 TEST(res == VK_SUCCESS);
4613 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004614 bufInfo.push_back(newBufInfo);
4615 }
4616
4617 //SaveAllocatorStatsToFile(L"TEST.json");
4618
Adam Sawicki21017c62018-09-07 15:26:59 +02004619 VmaPoolStats stats = {};
4620 vmaGetPoolStats(g_hAllocator, pool, &stats);
4621 int DBG = 0; // Set breakpoint here to inspect `stats`.
4622
Adam Sawicki80927152018-09-07 17:27:23 +02004623 // Allocate enough new buffers to surely fall into second block.
4624 for(uint32_t i = 0; i < 32; ++i)
4625 {
4626 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4627 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4628 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004629 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004630 bufInfo.push_back(newBufInfo);
4631 }
4632
4633 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4634
Adam Sawickia83793a2018-09-03 13:40:42 +02004635 // Destroy the buffers in random order.
4636 while(!bufInfo.empty())
4637 {
4638 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4639 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4640 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4641 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4642 }
4643
4644 vmaDestroyPool(g_hAllocator, pool);
4645}
4646
Adam Sawickif2975342018-10-16 13:49:02 +02004647// Test the testing environment.
4648static void TestGpuData()
4649{
4650 RandomNumberGenerator rand = { 53434 };
4651
4652 std::vector<AllocInfo> allocInfo;
4653
4654 for(size_t i = 0; i < 100; ++i)
4655 {
4656 AllocInfo info = {};
4657
4658 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4659 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
4660 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
4661 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4662 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
4663
4664 VmaAllocationCreateInfo allocCreateInfo = {};
4665 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4666
4667 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
4668 TEST(res == VK_SUCCESS);
4669
4670 info.m_StartValue = rand.Generate();
4671
4672 allocInfo.push_back(std::move(info));
4673 }
4674
4675 UploadGpuData(allocInfo.data(), allocInfo.size());
4676
4677 ValidateGpuData(allocInfo.data(), allocInfo.size());
4678
4679 DestroyAllAllocations(allocInfo);
4680}
4681
Adam Sawickib8333fb2018-03-13 16:15:53 +01004682void Test()
4683{
4684 wprintf(L"TESTING:\n");
4685
Adam Sawickif2975342018-10-16 13:49:02 +02004686 if(true)
Adam Sawicki70a683e2018-08-24 15:36:32 +02004687 {
4688 // # Temporarily insert custom tests here
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004689 // ########################################
4690 // ########################################
Adam Sawicki80927152018-09-07 17:27:23 +02004691
Adam Sawickiff0f7b82018-10-18 14:44:05 +02004692 TestDefragmentationGpu();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004693 return;
4694 }
4695
Adam Sawickib8333fb2018-03-13 16:15:53 +01004696 // # Simple tests
4697
4698 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02004699 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02004700#if VMA_DEBUG_MARGIN
4701 TestDebugMargin();
4702#else
4703 TestPool_SameSize();
4704 TestHeapSizeLimit();
4705#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02004706#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
4707 TestAllocationsInitialization();
4708#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01004709 TestMapping();
4710 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02004711 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02004712 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02004713 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004714
Adam Sawicki4338f662018-09-07 14:12:37 +02004715 BasicTestBuddyAllocator();
4716
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004717 {
4718 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02004719 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004720 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02004721 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004722 fclose(file);
4723 }
4724
Adam Sawickib8333fb2018-03-13 16:15:53 +01004725 TestDefragmentationSimple();
4726 TestDefragmentationFull();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02004727 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01004728
4729 // # Detailed tests
4730 FILE* file;
4731 fopen_s(&file, "Results.csv", "w");
4732 assert(file != NULL);
4733
4734 WriteMainTestResultHeader(file);
4735 PerformMainTests(file);
4736 //PerformCustomMainTest(file);
4737
4738 WritePoolTestResultHeader(file);
4739 PerformPoolTests(file);
4740 //PerformCustomPoolTest(file);
4741
4742 fclose(file);
4743
4744 wprintf(L"Done.\n");
4745}
4746
Adam Sawickif1a793c2018-03-13 15:42:22 +01004747#endif // #ifdef _WIN32