//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>
#include <algorithm>
#include <numeric>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

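// Size/intensity presets for the tests. ConfigType below selects which preset is used,
// e.g. by GetAllocationStrategyCount() to decide how many allocation strategies to exercise.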
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

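// Parameters of the main multithreaded stress test (see MainTest below).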
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
{
    const VkPhysicalDeviceMemoryProperties* props;
    vmaGetMemoryProperties(g_hAllocator, &props);
    return props->memoryTypes[memoryTypeIndex].heapIndex;
}

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL:   strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE:   strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

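// RAII timer: measures the time between construction and destruction and accumulates
// it into the referenced min/sum/max durations.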
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

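// Multithreaded allocation stress test. Each thread allocates random buffers and images up to
// its share of config.BeginBytesToAllocate, performs config.AdditionalOperationCount random
// allocations/frees (optionally through the shared commonAllocations list), then frees everything
// in the order given by config.FreeOrder. Timing and memory statistics are returned in outResult.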
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate.
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources.
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

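// Describes one test resource: a buffer or an image together with its VmaAllocation and the
// starting value of the sequential pattern used to fill and later validate its contents.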
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

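// Collection of persistently mapped staging buffers reused by UploadGpuData()/ValidateGpuData().
// Buffers are recycled between command buffer submissions; their total size is capped at MAX_TOTAL_SIZE.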
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

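// Fills every buffer in allocInfo with its sequential value pattern by copying from staging memory.
// When the staging collection runs out of space, the pending command buffer is submitted and the
// staging buffers are recycled before continuing.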
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

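// Copies every buffer back into staging memory and checks that each uint32_t still matches the
// sequential pattern written by UploadGpuData().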
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

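// Maps the allocation (unless it is persistently mapped) and verifies the sequential value pattern
// written when the resource was created.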
static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

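// After defragmentation has moved an allocation, the old buffer/image no longer matches its memory.
// Destroy it, create a new one from the stored create info, and bind it to the allocation again.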
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
        TEST(res == VK_SUCCESS);
    }
}

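// Wrapper over the legacy vmaDefragment() entry point that also recreates and rebinds any
// resources whose allocations were moved.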
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, the second slightly bigger than the first. Delete the first. Then defragment.
    */
    if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN is disabled.
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

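// Verifies that defragmenting a pool given via pPools and defragmenting the explicit list of its
// allocations given via pAllocations produce identical statistics.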
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations.
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

Adam Sawicki9a4f5082018-11-23 17:26:05 +01001613static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001614{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001615 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001616 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001617
1618 std::vector<AllocInfo> allocations;
1619
1620 // Create enough allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001621 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1622 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001623 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1625 const size_t percentToLeave = 30;
1626 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001627 RandomNumberGenerator rand = { 234522 };
1628
1629 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001630
1631 VmaAllocationCreateInfo allocCreateInfo = {};
1632 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001633 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001634
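// pUserData is used here as a plain tag rather than a real pointer:
// 2 = non-movable, 1 = movable. Only allocations tagged 1 are later
// passed to the defragmentation call below.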
1635 // Create all intended buffers.
1636 for(size_t i = 0; i < bufCount; ++i)
1637 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001638 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1639
1640 if(rand.Generate() % 100 < percentNonMovable)
1641 {
1642 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1643 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1644 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1645 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1646 }
1647 else
1648 {
1649 // Different usage just to see different color in output from VmaDumpVis.
1650 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1651 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1652 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1653 // And in JSON dump.
1654 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1655 }
1656
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001657 AllocInfo alloc;
1658 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1659 alloc.m_StartValue = rand.Generate();
1660 allocations.push_back(alloc);
1661 }
1662
1663 // Destroy some percentage of them.
1664 {
1665 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1666 for(size_t i = 0; i < buffersToDestroy; ++i)
1667 {
1668 const size_t index = rand.Generate() % allocations.size();
1669 allocations[index].Destroy();
1670 allocations.erase(allocations.begin() + index);
1671 }
1672 }
1673
1674 // Fill them with meaningful data.
1675 UploadGpuData(allocations.data(), allocations.size());
1676
Adam Sawickic6ede152018-11-16 17:04:14 +01001677 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001678 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001679 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001680
1681 // Defragment using GPU only.
1682 {
1683 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001684
Adam Sawickic6ede152018-11-16 17:04:14 +01001685 std::vector<VmaAllocation> allocationPtrs;
1686 std::vector<VkBool32> allocationChanged;
1687 std::vector<size_t> allocationOriginalIndex;
1688
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001689 for(size_t i = 0; i < allocCount; ++i)
1690 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001691 VmaAllocationInfo allocInfo = {};
1692 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1693 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1694 {
1695 allocationPtrs.push_back(allocations[i].m_Allocation);
1696 allocationChanged.push_back(VK_FALSE);
1697 allocationOriginalIndex.push_back(i);
1698 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001699 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001700
1701 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001702
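// GPU defragmentation flow (VMA 2.x API, as assumed here): the command buffer passed in
// VmaDefragmentationInfo2::commandBuffer must be in the recording state, so that
// vmaDefragmentationBegin() can record the copy commands into it. After the command
// buffer has been submitted and has finished executing (EndSingleTimeCommands),
// vmaDefragmentationEnd() completes the bookkeeping and releases the old memory.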
1703 BeginSingleTimeCommands();
1704
1705 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001706 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001707 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001708 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001709 defragInfo.pAllocationsChanged = allocationChanged.data();
1710 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001711 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1712 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1713
1714 VmaDefragmentationStats stats = {};
1715 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1716 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1717 TEST(res >= VK_SUCCESS);
1718
1719 EndSingleTimeCommands();
1720
1721 vmaDefragmentationEnd(g_hAllocator, ctx);
1722
Adam Sawickic6ede152018-11-16 17:04:14 +01001723 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001724 {
1725 if(allocationChanged[i])
1726 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001727 const size_t origAllocIndex = allocationOriginalIndex[i];
1728 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001729 }
1730 }
1731
Adam Sawicki4d844e22019-01-24 16:21:05 +01001732 // If corruption detection is enabled, GPU defragmentation may not work on
1733 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001734 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001735 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1736 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001737 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001738 }
1739
1740 ValidateGpuData(allocations.data(), allocations.size());
1741
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001742 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001743 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001744
1745 // Destroy all remaining buffers.
1746 for(size_t i = allocations.size(); i--; )
1747 {
1748 allocations[i].Destroy();
1749 }
Adam Sawicki05704002018-11-08 16:07:29 +01001750
1751 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001752}
1753
Adam Sawickib8333fb2018-03-13 16:15:53 +01001754static void TestUserData()
1755{
1756 VkResult res;
1757
1758 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1759 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1760 bufCreateInfo.size = 0x10000;
1761
1762 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1763 {
1764 // Opaque pointer
1765 {
1766
1767 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1768 void* pointerToSomething = &res;
1769
1770 VmaAllocationCreateInfo allocCreateInfo = {};
1771 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1772 allocCreateInfo.pUserData = numberAsPointer;
1773 if(testIndex == 1)
1774 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1775
1776 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1777 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001778 TEST(res == VK_SUCCESS);
1779 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001782 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001783
1784 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1785 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001786 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001787
1788 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1789 }
1790
1791 // String
1792 {
1793 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1794 const char* name2 = "2";
1795 const size_t name1Len = strlen(name1);
1796
1797 char* name1Buf = new char[name1Len + 1];
1798 strcpy_s(name1Buf, name1Len + 1, name1);
1799
1800 VmaAllocationCreateInfo allocCreateInfo = {};
1801 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1802 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1803 allocCreateInfo.pUserData = name1Buf;
1804 if(testIndex == 1)
1805 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1806
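// With USER_DATA_COPY_STRING_BIT the allocator is expected to keep its own copy of the
// null-terminated string, so the caller's buffer (name1Buf) can be freed right after
// the allocation is created - which this test verifies below.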
1807 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1808 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001809 TEST(res == VK_SUCCESS);
1810 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1811 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001812
1813 delete[] name1Buf;
1814
1815 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001816 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001817
1818 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1819 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001820 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001821
1822 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1823 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001824 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001825
1826 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1827 }
1828 }
1829}
1830
Adam Sawicki370ab182018-11-08 16:31:00 +01001831static void TestInvalidAllocations()
1832{
1833 VkResult res;
1834
1835 VmaAllocationCreateInfo allocCreateInfo = {};
1836 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1837
1838 // Try to allocate 0 bytes.
1839 {
1840 VkMemoryRequirements memReq = {};
1841 memReq.size = 0; // !!!
1842 memReq.alignment = 4;
1843 memReq.memoryTypeBits = UINT32_MAX;
1844 VmaAllocation alloc = VK_NULL_HANDLE;
1845 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1846 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1847 }
1848
1849 // Try to create buffer with size = 0.
1850 {
1851 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1852 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1853 bufCreateInfo.size = 0; // !!!
1854 VkBuffer buf = VK_NULL_HANDLE;
1855 VmaAllocation alloc = VK_NULL_HANDLE;
1856 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1857 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1858 }
1859
1860 // Try to create image with one dimension = 0.
1861 {
1862 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1863 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1864 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1865 imageCreateInfo.extent.width = 128;
1866 imageCreateInfo.extent.height = 0; // !!!
1867 imageCreateInfo.extent.depth = 1;
1868 imageCreateInfo.mipLevels = 1;
1869 imageCreateInfo.arrayLayers = 1;
1870 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1871 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1872 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1873 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1874 VkImage image = VK_NULL_HANDLE;
1875 VmaAllocation alloc = VK_NULL_HANDLE;
1876 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1877 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1878 }
1879}
1880
Adam Sawickib8333fb2018-03-13 16:15:53 +01001881static void TestMemoryRequirements()
1882{
1883 VkResult res;
1884 VkBuffer buf;
1885 VmaAllocation alloc;
1886 VmaAllocationInfo allocInfo;
1887
1888 const VkPhysicalDeviceMemoryProperties* memProps;
1889 vmaGetMemoryProperties(g_hAllocator, &memProps);
1890
1891 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1892 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1893 bufInfo.size = 128;
1894
1895 VmaAllocationCreateInfo allocCreateInfo = {};
1896
1897 // No requirements.
1898 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001899 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001900 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1901
1902 // Usage.
1903 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1904 allocCreateInfo.requiredFlags = 0;
1905 allocCreateInfo.preferredFlags = 0;
1906 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1907
1908 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001909 TEST(res == VK_SUCCESS);
1910 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001911 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1912
1913 // Required flags, preferred flags.
1914 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1915 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1916 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1917 allocCreateInfo.memoryTypeBits = 0;
1918
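// requiredFlags must all be present in the chosen memory type, so they are asserted below.
// preferredFlags are only a hint and may or may not be satisfied, so they are not asserted.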
1919 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001920 TEST(res == VK_SUCCESS);
1921 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1922 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001923 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1924
1925 // memoryTypeBits.
1926 const uint32_t memType = allocInfo.memoryType;
1927 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1928 allocCreateInfo.requiredFlags = 0;
1929 allocCreateInfo.preferredFlags = 0;
1930 allocCreateInfo.memoryTypeBits = 1u << memType;
1931
1932 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001933 TEST(res == VK_SUCCESS);
1934 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001935 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1936
1937}
1938
1939static void TestBasics()
1940{
1941 VkResult res;
1942
1943 TestMemoryRequirements();
1944
1945 // Lost allocation
1946 {
1947 VmaAllocation alloc = VK_NULL_HANDLE;
1948 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001949 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001950
1951 VmaAllocationInfo allocInfo;
1952 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001953 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1954 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001955
1956 vmaFreeMemory(g_hAllocator, alloc);
1957 }
1958
1959 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1960 {
1961 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1962 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1963 bufCreateInfo.size = 128;
1964
1965 VmaAllocationCreateInfo allocCreateInfo = {};
1966 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1967 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1968
1969 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1970 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001972
1973 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1974
1975 // Same with DEDICATED_MEMORY.
1976 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1977
1978 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001979 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980
1981 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1982 }
1983
1984 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001985
1986 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001987}
1988
1989void TestHeapSizeLimit()
1990{
Adam Sawickib3f51102019-11-18 13:05:56 +01001991 const VkDeviceSize HEAP_SIZE_LIMIT = 200ull * 1024 * 1024; // 200 MB
1992 const VkDeviceSize BLOCK_SIZE = 20ull * 1024 * 1024; // 20 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01001993
1994 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1995 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1996 {
1997 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1998 }
1999
2000 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2001 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2002 allocatorCreateInfo.device = g_hDevice;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002003 allocatorCreateInfo.instance = g_hVulkanInstance;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002004 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2005
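// pHeapSizeLimit makes the allocator act as if each heap had at most HEAP_SIZE_LIMIT
// bytes: once that budget is used up, further allocations are expected to fail with
// VK_ERROR_OUT_OF_DEVICE_MEMORY, which step 3 below relies on.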
2006 VmaAllocator hAllocator;
2007 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002008 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002009
2010 struct Item
2011 {
2012 VkBuffer hBuf;
2013 VmaAllocation hAlloc;
2014 };
2015 std::vector<Item> items;
2016
2017 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2018 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2019
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002020 // 1. Allocate two buffers of dedicated memory, each half the size of BLOCK_SIZE.
2021 VmaAllocationInfo dedicatedAllocInfo;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002022 {
2023 VmaAllocationCreateInfo allocCreateInfo = {};
2024 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2025 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2026
2027 bufCreateInfo.size = BLOCK_SIZE / 2;
2028
2029 for(size_t i = 0; i < 2; ++i)
2030 {
2031 Item item;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002032 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002033 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002034 items.push_back(item);
2035 }
2036 }
2037
2038 // Create a pool to make sure further allocations come from this same memory type.
2039 VmaPoolCreateInfo poolCreateInfo = {};
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002040 poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002041 poolCreateInfo.blockSize = BLOCK_SIZE;
2042
2043 VmaPool hPool;
2044 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002045 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002046
2047 // 2. Allocate normal buffers from all the remaining memory.
2048 {
2049 VmaAllocationCreateInfo allocCreateInfo = {};
2050 allocCreateInfo.pool = hPool;
2051
2052 bufCreateInfo.size = BLOCK_SIZE / 2;
2053
2054 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
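// 200 MB limit / 20 MB block = 10 blocks; one block's worth (2 x 10 MB) is already
// taken by the dedicated buffers, so 9 remaining blocks * 2 half-block buffers = 18.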
2055 for(size_t i = 0; i < bufCount; ++i)
2056 {
2057 Item item;
2058 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002059 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002060 items.push_back(item);
2061 }
2062 }
2063
2064 // 3. Allocation of one more (even small) buffer should fail.
2065 {
2066 VmaAllocationCreateInfo allocCreateInfo = {};
2067 allocCreateInfo.pool = hPool;
2068
2069 bufCreateInfo.size = 128;
2070
2071 VkBuffer hBuf;
2072 VmaAllocation hAlloc;
2073 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002074 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002075 }
2076
2077 // Destroy everything.
2078 for(size_t i = items.size(); i--; )
2079 {
2080 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2081 }
2082
2083 vmaDestroyPool(hAllocator, hPool);
2084
2085 vmaDestroyAllocator(hAllocator);
2086}
2087
Adam Sawicki212a4a62018-06-14 15:44:45 +02002088#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002089static void TestDebugMargin()
2090{
2091 if(VMA_DEBUG_MARGIN == 0)
2092 {
2093 return;
2094 }
2095
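// VMA_DEBUG_MARGIN reserves that many bytes of free space around every allocation;
// if VMA_DEBUG_DETECT_CORRUPTION is also enabled, those margins are filled with a
// magic pattern that vmaCheckCorruption() (called at the end of this test) validates.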
2096 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002097 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002098
2099 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002100 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002101
2102 // Create a few buffers of different sizes.
2103 const size_t BUF_COUNT = 10;
2104 BufferInfo buffers[BUF_COUNT];
2105 VmaAllocationInfo allocInfo[BUF_COUNT];
2106 for(size_t i = 0; i < 10; ++i)
2107 {
2108 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002109 // Last one will be mapped.
2110 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002111
2112 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002113 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002114 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002115 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002116
2117 if(i == BUF_COUNT - 1)
2118 {
2119 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002120 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002121 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2122 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2123 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002124 }
2125
2126 // Check if their offsets preserve margin between them.
2127 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2128 {
2129 if(lhs.deviceMemory != rhs.deviceMemory)
2130 {
2131 return lhs.deviceMemory < rhs.deviceMemory;
2132 }
2133 return lhs.offset < rhs.offset;
2134 });
2135 for(size_t i = 1; i < BUF_COUNT; ++i)
2136 {
2137 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2138 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002139 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002140 }
2141 }
2142
Adam Sawicki212a4a62018-06-14 15:44:45 +02002143 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002144 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002145
Adam Sawicki73b16652018-06-11 16:39:25 +02002146 // Destroy all buffers.
2147 for(size_t i = BUF_COUNT; i--; )
2148 {
2149 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2150 }
2151}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002152#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002153
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002154static void TestLinearAllocator()
2155{
2156 wprintf(L"Test linear allocator\n");
2157
2158 RandomNumberGenerator rand{645332};
2159
2160 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2161 sampleBufCreateInfo.size = 1024; // Whatever.
2162 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2163
2164 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2165 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2166
2167 VmaPoolCreateInfo poolCreateInfo = {};
2168 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002169 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002170
Adam Sawickiee082772018-06-20 17:45:49 +02002171 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002172 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2173 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2174
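// A pool created with LINEAR_ALGORITHM_BIT behaves as a free-at-once block, a stack,
// a double stack (when UPPER_ADDRESS allocations are mixed in), or a ring buffer,
// depending on the order of allocations and frees - each mode is exercised below.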
2175 VmaPool pool = nullptr;
2176 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002177 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002178
2179 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2180
2181 VmaAllocationCreateInfo allocCreateInfo = {};
2182 allocCreateInfo.pool = pool;
2183
2184 constexpr size_t maxBufCount = 100;
2185 std::vector<BufferInfo> bufInfo;
2186
2187 constexpr VkDeviceSize bufSizeMin = 16;
2188 constexpr VkDeviceSize bufSizeMax = 1024;
2189 VmaAllocationInfo allocInfo;
2190 VkDeviceSize prevOffset = 0;
2191
2192 // Test one-time free.
2193 for(size_t i = 0; i < 2; ++i)
2194 {
2195 // Allocate a number of buffers of varying size that surely fit into this block.
2196 VkDeviceSize bufSumSize = 0;
2197 for(size_t i = 0; i < maxBufCount; ++i)
2198 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002199 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002200 BufferInfo newBufInfo;
2201 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2202 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002203 TEST(res == VK_SUCCESS);
2204 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002205 bufInfo.push_back(newBufInfo);
2206 prevOffset = allocInfo.offset;
2207 bufSumSize += bufCreateInfo.size;
2208 }
2209
2210 // Validate pool stats.
2211 VmaPoolStats stats;
2212 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002213 TEST(stats.size == poolCreateInfo.blockSize);
2214 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2215 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002216
2217 // Destroy the buffers in random order.
2218 while(!bufInfo.empty())
2219 {
2220 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2221 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2222 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2223 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2224 }
2225 }
2226
2227 // Test stack.
2228 {
2229 // Allocate a number of buffers of varying size that surely fit into this block.
2230 for(size_t i = 0; i < maxBufCount; ++i)
2231 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002232 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002233 BufferInfo newBufInfo;
2234 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2235 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002236 TEST(res == VK_SUCCESS);
2237 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002238 bufInfo.push_back(newBufInfo);
2239 prevOffset = allocInfo.offset;
2240 }
2241
2242 // Destroy few buffers from top of the stack.
2243 for(size_t i = 0; i < maxBufCount / 5; ++i)
2244 {
2245 const BufferInfo& currBufInfo = bufInfo.back();
2246 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2247 bufInfo.pop_back();
2248 }
2249
2250 // Create some more
2251 for(size_t i = 0; i < maxBufCount / 5; ++i)
2252 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002253 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002254 BufferInfo newBufInfo;
2255 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2256 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002257 TEST(res == VK_SUCCESS);
2258 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002259 bufInfo.push_back(newBufInfo);
2260 prevOffset = allocInfo.offset;
2261 }
2262
2263 // Destroy the buffers in reverse order.
2264 while(!bufInfo.empty())
2265 {
2266 const BufferInfo& currBufInfo = bufInfo.back();
2267 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2268 bufInfo.pop_back();
2269 }
2270 }
2271
Adam Sawickiee082772018-06-20 17:45:49 +02002272 // Test ring buffer.
2273 {
2274 // Allocate a number of buffers that surely fit into this block.
2275 bufCreateInfo.size = bufSizeMax;
2276 for(size_t i = 0; i < maxBufCount; ++i)
2277 {
2278 BufferInfo newBufInfo;
2279 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2280 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002281 TEST(res == VK_SUCCESS);
2282 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002283 bufInfo.push_back(newBufInfo);
2284 prevOffset = allocInfo.offset;
2285 }
2286
2287 // Free and allocate new buffers enough times to make sure we wrap around at least once.
2288 const size_t buffersPerIter = maxBufCount / 10 - 1;
2289 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2290 for(size_t iter = 0; iter < iterCount; ++iter)
2291 {
2292 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2293 {
2294 const BufferInfo& currBufInfo = bufInfo.front();
2295 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2296 bufInfo.erase(bufInfo.begin());
2297 }
2298 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2299 {
2300 BufferInfo newBufInfo;
2301 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2302 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002303 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002304 bufInfo.push_back(newBufInfo);
2305 }
2306 }
2307
2308 // Allocate buffers until we reach out-of-memory.
2309 uint32_t debugIndex = 0;
2310 while(res == VK_SUCCESS)
2311 {
2312 BufferInfo newBufInfo;
2313 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2314 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2315 if(res == VK_SUCCESS)
2316 {
2317 bufInfo.push_back(newBufInfo);
2318 }
2319 else
2320 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002321 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002322 }
2323 ++debugIndex;
2324 }
2325
2326 // Destroy the buffers in random order.
2327 while(!bufInfo.empty())
2328 {
2329 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2330 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2331 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2332 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2333 }
2334 }
2335
Adam Sawicki680b2252018-08-22 14:47:32 +02002336 // Test double stack.
2337 {
2338 // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2339 VkDeviceSize prevOffsetLower = 0;
2340 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
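// UPPER_ADDRESS_BIT allocations are placed at the upper end of the block and grow
// downward, while default allocations grow upward from offset 0, forming a double stack.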
2341 for(size_t i = 0; i < maxBufCount; ++i)
2342 {
2343 const bool upperAddress = (i % 2) != 0;
2344 if(upperAddress)
2345 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2346 else
2347 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002348 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002349 BufferInfo newBufInfo;
2350 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2351 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002353 if(upperAddress)
2354 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002355 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002356 prevOffsetUpper = allocInfo.offset;
2357 }
2358 else
2359 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002360 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002361 prevOffsetLower = allocInfo.offset;
2362 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002363 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002364 bufInfo.push_back(newBufInfo);
2365 }
2366
2367 // Destroy few buffers from top of the stack.
2368 for(size_t i = 0; i < maxBufCount / 5; ++i)
2369 {
2370 const BufferInfo& currBufInfo = bufInfo.back();
2371 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2372 bufInfo.pop_back();
2373 }
2374
2375 // Create some more
2376 for(size_t i = 0; i < maxBufCount / 5; ++i)
2377 {
2378 const bool upperAddress = (i % 2) != 0;
2379 if(upperAddress)
2380 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2381 else
2382 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002383 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002384 BufferInfo newBufInfo;
2385 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2386 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002387 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002388 bufInfo.push_back(newBufInfo);
2389 }
2390
2391 // Destroy the buffers in reverse order.
2392 while(!bufInfo.empty())
2393 {
2394 const BufferInfo& currBufInfo = bufInfo.back();
2395 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2396 bufInfo.pop_back();
2397 }
2398
2399 // Create buffers on both sides until we reach out of memory.
2400 prevOffsetLower = 0;
2401 prevOffsetUpper = poolCreateInfo.blockSize;
2402 res = VK_SUCCESS;
2403 for(size_t i = 0; res == VK_SUCCESS; ++i)
2404 {
2405 const bool upperAddress = (i % 2) != 0;
2406 if(upperAddress)
2407 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2408 else
2409 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002410 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002411 BufferInfo newBufInfo;
2412 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2413 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2414 if(res == VK_SUCCESS)
2415 {
2416 if(upperAddress)
2417 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002418 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002419 prevOffsetUpper = allocInfo.offset;
2420 }
2421 else
2422 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002423 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002424 prevOffsetLower = allocInfo.offset;
2425 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002426 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002427 bufInfo.push_back(newBufInfo);
2428 }
2429 }
2430
2431 // Destroy the buffers in random order.
2432 while(!bufInfo.empty())
2433 {
2434 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2435 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2436 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2437 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2438 }
2439
2440 // Create buffers on upper side only, constant size, until we reach out of memory.
2441 prevOffsetUpper = poolCreateInfo.blockSize;
2442 res = VK_SUCCESS;
2443 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2444 bufCreateInfo.size = bufSizeMax;
2445 for(size_t i = 0; res == VK_SUCCESS; ++i)
2446 {
2447 BufferInfo newBufInfo;
2448 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2449 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2450 if(res == VK_SUCCESS)
2451 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002452 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002453 prevOffsetUpper = allocInfo.offset;
2454 bufInfo.push_back(newBufInfo);
2455 }
2456 }
2457
2458 // Destroy the buffers in reverse order.
2459 while(!bufInfo.empty())
2460 {
2461 const BufferInfo& currBufInfo = bufInfo.back();
2462 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2463 bufInfo.pop_back();
2464 }
2465 }
2466
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002467 // Test ring buffer with lost allocations.
2468 {
2469 // Allocate a number of buffers until the pool is full.
2470 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2471 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
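// Allocations made with CAN_BECOME_LOST may later be taken over by allocations that use
// CAN_MAKE_OTHER_LOST, provided they have not been used in recent frames - hence
// vmaSetCurrentFrameIndex() is advanced before every allocation in this loop.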
2472 res = VK_SUCCESS;
2473 for(size_t i = 0; res == VK_SUCCESS; ++i)
2474 {
2475 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2476
Adam Sawickifd366b62019-01-24 15:26:43 +01002477 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002478
2479 BufferInfo newBufInfo;
2480 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2481 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2482 if(res == VK_SUCCESS)
2483 bufInfo.push_back(newBufInfo);
2484 }
2485
2486 // Free first half of it.
2487 {
2488 const size_t buffersToDelete = bufInfo.size() / 2;
2489 for(size_t i = 0; i < buffersToDelete; ++i)
2490 {
2491 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2492 }
2493 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2494 }
2495
2496 // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002497 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002498 res = VK_SUCCESS;
2499 for(size_t i = 0; res == VK_SUCCESS; ++i)
2500 {
2501 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2502
Adam Sawickifd366b62019-01-24 15:26:43 +01002503 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002504
2505 BufferInfo newBufInfo;
2506 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2507 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2508 if(res == VK_SUCCESS)
2509 bufInfo.push_back(newBufInfo);
2510 }
2511
2512 VkDeviceSize firstNewOffset;
2513 {
2514 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2515
2516 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2517 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2518 bufCreateInfo.size = bufSizeMax;
2519
2520 BufferInfo newBufInfo;
2521 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2522 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002523 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002524 bufInfo.push_back(newBufInfo);
2525 firstNewOffset = allocInfo.offset;
2526
2527 // Make sure at least one buffer from the beginning became lost.
2528 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002529 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002530 }
2531
Adam Sawickifd366b62019-01-24 15:26:43 +01002532#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002533 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2534 size_t newCount = 1;
2535 for(;;)
2536 {
2537 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2538
Adam Sawickifd366b62019-01-24 15:26:43 +01002539 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002540
2541 BufferInfo newBufInfo;
2542 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2543 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002544
Adam Sawickib8d34d52018-10-03 17:41:20 +02002545 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002546 bufInfo.push_back(newBufInfo);
2547 ++newCount;
2548 if(allocInfo.offset < firstNewOffset)
2549 break;
2550 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002551#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002552
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002553 // Delete buffers that are lost.
2554 for(size_t i = bufInfo.size(); i--; )
2555 {
2556 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2557 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2558 {
2559 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2560 bufInfo.erase(bufInfo.begin() + i);
2561 }
2562 }
2563
2564 // Test vmaMakePoolAllocationsLost
2565 {
2566 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2567
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002568 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002569 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002570 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002571
2572 size_t realLostAllocCount = 0;
2573 for(size_t i = 0; i < bufInfo.size(); ++i)
2574 {
2575 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2576 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2577 ++realLostAllocCount;
2578 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002579 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002580 }
2581
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002582 // Destroy all the buffers in forward order.
2583 for(size_t i = 0; i < bufInfo.size(); ++i)
2584 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2585 bufInfo.clear();
2586 }
2587
Adam Sawicki70a683e2018-08-24 15:36:32 +02002588 vmaDestroyPool(g_hAllocator, pool);
2589}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002590
Adam Sawicki70a683e2018-08-24 15:36:32 +02002591static void TestLinearAllocatorMultiBlock()
2592{
2593 wprintf(L"Test linear allocator multi block\n");
2594
2595 RandomNumberGenerator rand{345673};
2596
2597 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2598 sampleBufCreateInfo.size = 1024 * 1024;
2599 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2600
2601 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2602 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2603
2604 VmaPoolCreateInfo poolCreateInfo = {};
2605 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2606 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002607 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002608
2609 VmaPool pool = nullptr;
2610 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002611 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002612
2613 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2614
2615 VmaAllocationCreateInfo allocCreateInfo = {};
2616 allocCreateInfo.pool = pool;
2617
2618 std::vector<BufferInfo> bufInfo;
2619 VmaAllocationInfo allocInfo;
2620
2621 // Test one-time free.
2622 {
2623 // Allocate buffers until we move to a second block.
2624 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2625 for(uint32_t i = 0; ; ++i)
2626 {
2627 BufferInfo newBufInfo;
2628 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2629 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002630 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002631 bufInfo.push_back(newBufInfo);
2632 if(lastMem && allocInfo.deviceMemory != lastMem)
2633 {
2634 break;
2635 }
2636 lastMem = allocInfo.deviceMemory;
2637 }
2638
Adam Sawickib8d34d52018-10-03 17:41:20 +02002639 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002640
2641 // Make sure that pool has now two blocks.
2642 VmaPoolStats poolStats = {};
2643 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002644 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002645
2646 // Destroy all the buffers in random order.
2647 while(!bufInfo.empty())
2648 {
2649 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2650 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2651 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2652 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2653 }
2654
2655 // Make sure that pool has now at most one block.
2656 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002657 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002658 }
2659
2660 // Test stack.
2661 {
2662 // Allocate buffers until we move to a second block.
2663 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2664 for(uint32_t i = 0; ; ++i)
2665 {
2666 BufferInfo newBufInfo;
2667 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2668 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002670 bufInfo.push_back(newBufInfo);
2671 if(lastMem && allocInfo.deviceMemory != lastMem)
2672 {
2673 break;
2674 }
2675 lastMem = allocInfo.deviceMemory;
2676 }
2677
Adam Sawickib8d34d52018-10-03 17:41:20 +02002678 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002679
2680 // Add a few more buffers.
2681 for(uint32_t i = 0; i < 5; ++i)
2682 {
2683 BufferInfo newBufInfo;
2684 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2685 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002686 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002687 bufInfo.push_back(newBufInfo);
2688 }
2689
2690 // Make sure that pool has now two blocks.
2691 VmaPoolStats poolStats = {};
2692 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002693 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002694
2695 // Delete half of the buffers, LIFO.
2696 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2697 {
2698 const BufferInfo& currBufInfo = bufInfo.back();
2699 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2700 bufInfo.pop_back();
2701 }
2702
2703 // Add one more buffer.
2704 BufferInfo newBufInfo;
2705 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2706 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002707 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002708 bufInfo.push_back(newBufInfo);
2709
2710 // Make sure that pool has now one block.
2711 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002712 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002713
2714 // Delete all the remaining buffers, LIFO.
2715 while(!bufInfo.empty())
2716 {
2717 const BufferInfo& currBufInfo = bufInfo.back();
2718 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2719 bufInfo.pop_back();
2720 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002721 }
2722
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002723 vmaDestroyPool(g_hAllocator, pool);
2724}
2725
Adam Sawickifd11d752018-08-22 15:02:10 +02002726static void ManuallyTestLinearAllocator()
2727{
2728 VmaStats origStats;
2729 vmaCalculateStats(g_hAllocator, &origStats);
2730
2731 wprintf(L"Manually test linear allocator\n");
2732
2733 RandomNumberGenerator rand{645332};
2734
2735 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2736 sampleBufCreateInfo.size = 1024; // Whatever.
2737 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2738
2739 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2740 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2741
2742 VmaPoolCreateInfo poolCreateInfo = {};
2743 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002744 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002745
2746 poolCreateInfo.blockSize = 10 * 1024;
2747 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2748 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2749
2750 VmaPool pool = nullptr;
2751 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002752 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002753
2754 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2755
2756 VmaAllocationCreateInfo allocCreateInfo = {};
2757 allocCreateInfo.pool = pool;
2758
2759 std::vector<BufferInfo> bufInfo;
2760 VmaAllocationInfo allocInfo;
2761 BufferInfo newBufInfo;
2762
2763 // Test double stack.
2764 {
2765 /*
2766 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2767 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2768
2769 In total:
2770 1 block allocated
2771 10240 Vulkan bytes
2772 6 new allocations
2773 2256 bytes in allocations
2774 */
2775
2776 bufCreateInfo.size = 32;
2777 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2778 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002779 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002780 bufInfo.push_back(newBufInfo);
2781
2782 bufCreateInfo.size = 1024;
2783 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2784 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002785 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002786 bufInfo.push_back(newBufInfo);
2787
2788 bufCreateInfo.size = 32;
2789 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2790 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002791 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002792 bufInfo.push_back(newBufInfo);
2793
2794 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2795
2796 bufCreateInfo.size = 128;
2797 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2798 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002799 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002800 bufInfo.push_back(newBufInfo);
2801
2802 bufCreateInfo.size = 1024;
2803 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2804 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002805 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002806 bufInfo.push_back(newBufInfo);
2807
2808 bufCreateInfo.size = 16;
2809 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2810 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002811 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002812 bufInfo.push_back(newBufInfo);
2813
2814 VmaStats currStats;
2815 vmaCalculateStats(g_hAllocator, &currStats);
2816 VmaPoolStats poolStats;
2817 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2818
2819 char* statsStr = nullptr;
2820 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2821
2822 // PUT BREAKPOINT HERE TO CHECK.
2823 // Inspect: currStats versus origStats, poolStats, statsStr.
2824 int I = 0;
2825
2826 vmaFreeStatsString(g_hAllocator, statsStr);
2827
2828 // Destroy the buffers in reverse order.
2829 while(!bufInfo.empty())
2830 {
2831 const BufferInfo& currBufInfo = bufInfo.back();
2832 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2833 bufInfo.pop_back();
2834 }
2835 }
2836
2837 vmaDestroyPool(g_hAllocator, pool);
2838}
2839
Adam Sawicki80927152018-09-07 17:27:23 +02002840static void BenchmarkAlgorithmsCase(FILE* file,
2841 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002842 bool empty,
2843 VmaAllocationCreateFlags allocStrategy,
2844 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002845{
2846 RandomNumberGenerator rand{16223};
2847
2848 const VkDeviceSize bufSizeMin = 32;
2849 const VkDeviceSize bufSizeMax = 1024;
2850 const size_t maxBufCapacity = 10000;
2851 const uint32_t iterationCount = 10;
2852
2853 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2854 sampleBufCreateInfo.size = bufSizeMax;
2855 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2856
2857 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2858 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2859
2860 VmaPoolCreateInfo poolCreateInfo = {};
2861 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002862 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002863
2864 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002865 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002866 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2867
2868 VmaPool pool = nullptr;
2869 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002870 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002871
2872 // Buffer created just to get memory requirements. Never bound to any memory.
2873 VkBuffer dummyBuffer = VK_NULL_HANDLE;
Adam Sawicki1f84f622019-07-02 13:40:01 +02002874 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002875 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002876
2877 VkMemoryRequirements memReq = {};
2878 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2879
Adam Sawicki1f84f622019-07-02 13:40:01 +02002880 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawicki0a607132018-08-24 11:18:41 +02002881
2882 VmaAllocationCreateInfo allocCreateInfo = {};
2883 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002884 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002885
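// Editor's note (sketch of what is being measured, based on the code below): this
// function allocates with vmaAllocateMemory() and raw VkMemoryRequirements instead
// of creating real buffers. Because the pool owns exactly one pre-created block
// (minBlockCount == maxBlockCount == 1), the timed loops exercise only the chosen
// metadata algorithm - no vkAllocateMemory or vkCreateBuffer calls occur inside them.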
2886 VmaAllocation alloc;
2887 std::vector<VmaAllocation> baseAllocations;
2888
2889 if(!empty)
2890 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002891 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002892 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002893 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002894 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002895 // This test intentionally allows sizes that are not aligned to 4 or 16 bytes.
2896 // Such sizes are theoretically allowed and have already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002897 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2898 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002899 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002900 baseAllocations.push_back(alloc);
2901 totalSize += memReq.size;
2902 }
2903
2904 // Delete half of them, chosen at random.
2905 size_t allocsToDelete = baseAllocations.size() / 2;
2906 for(size_t i = 0; i < allocsToDelete; ++i)
2907 {
2908 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2909 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2910 baseAllocations.erase(baseAllocations.begin() + index);
2911 }
2912 }
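// Editor's note: in the non-empty case, roughly 1/6 of the block is still occupied
// by randomly scattered allocations at this point (1/3 was allocated, about half of
// it freed at random), so the timed loop below allocates into a fragmented block
// rather than a pristine one.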
2913
2914 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002915 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002916 std::vector<VmaAllocation> testAllocations;
2917 testAllocations.reserve(allocCount);
2918 duration allocTotalDuration = duration::zero();
2919 duration freeTotalDuration = duration::zero();
2920 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2921 {
2922 // Allocations
2923 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2924 for(size_t i = 0; i < allocCount; ++i)
2925 {
2926 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2927 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002928 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002929 testAllocations.push_back(alloc);
2930 }
2931 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2932
2933 // Deallocations
2934 switch(freeOrder)
2935 {
2936 case FREE_ORDER::FORWARD:
2937 // Leave testAllocations unchanged.
2938 break;
2939 case FREE_ORDER::BACKWARD:
2940 std::reverse(testAllocations.begin(), testAllocations.end());
2941 break;
2942 case FREE_ORDER::RANDOM:
2943 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2944 break;
2945 default: assert(0);
2946 }
2947
2948 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2949 for(size_t i = 0; i < allocCount; ++i)
2950 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2951 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2952
2953 testAllocations.clear();
2954 }
2955
2956 // Delete baseAllocations
2957 while(!baseAllocations.empty())
2958 {
2959 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2960 baseAllocations.pop_back();
2961 }
2962
2963 vmaDestroyPool(g_hAllocator, pool);
2964
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002965 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2966 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2967
Adam Sawicki80927152018-09-07 17:27:23 +02002968 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2969 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002970 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002971 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002972 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002973 allocTotalSeconds,
2974 freeTotalSeconds);
2975
2976 if(file)
2977 {
2978 std::string currTime;
2979 CurrentTimeToStr(currTime);
2980
Adam Sawicki80927152018-09-07 17:27:23 +02002981 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002982 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002983 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002984 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002985 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002986 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2987 allocTotalSeconds,
2988 freeTotalSeconds);
2989 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002990}
2991
Adam Sawicki80927152018-09-07 17:27:23 +02002992static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002993{
Adam Sawicki80927152018-09-07 17:27:23 +02002994 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002995
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002996 if(file)
2997 {
2998 fprintf(file,
2999 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003000 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003001 "Allocation time (s),Deallocation time (s)\n");
3002 }
3003
Adam Sawicki0a607132018-08-24 11:18:41 +02003004 uint32_t freeOrderCount = 1;
3005 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3006 freeOrderCount = 3;
3007 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3008 freeOrderCount = 2;
3009
3010 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003011 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003012
3013 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3014 {
3015 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3016 switch(freeOrderIndex)
3017 {
3018 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3019 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3020 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3021 default: assert(0);
3022 }
3023
3024 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3025 {
Adam Sawicki80927152018-09-07 17:27:23 +02003026 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003027 {
Adam Sawicki80927152018-09-07 17:27:23 +02003028 uint32_t algorithm = 0;
3029 switch(algorithmIndex)
3030 {
3031 case 0:
3032 break;
3033 case 1:
3034 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3035 break;
3036 case 2:
3037 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3038 break;
3039 default:
3040 assert(0);
3041 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003042
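// Editor's note: the best/worst/first-fit strategy flags are varied only for the
// default algorithm; the linear and buddy algorithms place allocations by their own
// rules, so a single run per configuration suffices for them (hence the count of 1
// below).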
Adam Sawicki80927152018-09-07 17:27:23 +02003043 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003044 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3045 {
3046 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003047 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003048 {
3049 switch(allocStrategyIndex)
3050 {
3051 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3052 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3053 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3054 default: assert(0);
3055 }
3056 }
3057
Adam Sawicki80927152018-09-07 17:27:23 +02003058 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003059 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003060 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003061 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003062 strategy,
3063 freeOrder); // freeOrder
3064 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003065 }
3066 }
3067 }
3068}
3069
Adam Sawickib8333fb2018-03-13 16:15:53 +01003070static void TestPool_SameSize()
3071{
3072 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3073 const size_t BUF_COUNT = 100;
3074 VkResult res;
3075
3076 RandomNumberGenerator rand{123};
3077
3078 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3079 bufferInfo.size = BUF_SIZE;
3080 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3081
3082 uint32_t memoryTypeBits = UINT32_MAX;
3083 {
3084 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003085 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003086 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003087
3088 VkMemoryRequirements memReq;
3089 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3090 memoryTypeBits = memReq.memoryTypeBits;
3091
Adam Sawicki1f84f622019-07-02 13:40:01 +02003092 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003093 }
3094
3095 VmaAllocationCreateInfo poolAllocInfo = {};
3096 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3097 uint32_t memTypeIndex;
3098 res = vmaFindMemoryTypeIndex(
3099 g_hAllocator,
3100 memoryTypeBits,
3101 &poolAllocInfo,
3102 &memTypeIndex);
3103
3104 VmaPoolCreateInfo poolCreateInfo = {};
3105 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3106 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3107 poolCreateInfo.minBlockCount = 1;
3108 poolCreateInfo.maxBlockCount = 4;
3109 poolCreateInfo.frameInUseCount = 0;
3110
3111 VmaPool pool;
3112 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003113 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003114
Adam Sawickia020fb82019-11-02 14:43:06 +01003115 // Test pool name
3116 {
3117 static const char* const POOL_NAME = "Pool name";
3118 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3119
3120 const char* fetchedPoolName = nullptr;
3121 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3122 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3123
Adam Sawickia020fb82019-11-02 14:43:06 +01003124 vmaSetPoolName(g_hAllocator, pool, nullptr);
3125 }
3126
Adam Sawickib8333fb2018-03-13 16:15:53 +01003127 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3128
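// Editor's note on the flags used below: CAN_BECOME_LOST allows these allocations to
// be taken away ("lost") when the pool runs out of space, and CAN_MAKE_OTHER_LOST
// allows a new allocation to evict such old ones - but only if the victim was not
// used during the last frameInUseCount frames (0 for this pool). Usage is registered
// via vmaGetAllocationInfo()/vmaTouchAllocation(), relative to the frame index set
// with vmaSetCurrentFrameIndex().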
3129 VmaAllocationCreateInfo allocInfo = {};
3130 allocInfo.pool = pool;
3131 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3132 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3133
3134 struct BufItem
3135 {
3136 VkBuffer Buf;
3137 VmaAllocation Alloc;
3138 };
3139 std::vector<BufItem> items;
3140
3141 // Fill entire pool.
3142 for(size_t i = 0; i < BUF_COUNT; ++i)
3143 {
3144 BufItem item;
3145 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003146 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003147 items.push_back(item);
3148 }
3149
3150 // Make sure that another allocation would fail.
3151 {
3152 BufItem item;
3153 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003154 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003155 }
3156
3157 // Validate that no buffer is lost. Also check that they are not mapped.
3158 for(size_t i = 0; i < items.size(); ++i)
3159 {
3160 VmaAllocationInfo allocInfo;
3161 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003162 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3163 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003164 }
3165
3166 // Free some percent of random items.
3167 {
3168 const size_t PERCENT_TO_FREE = 10;
3169 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3170 for(size_t i = 0; i < itemsToFree; ++i)
3171 {
3172 size_t index = (size_t)rand.Generate() % items.size();
3173 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3174 items.erase(items.begin() + index);
3175 }
3176 }
3177
3178 // Randomly allocate and free items.
3179 {
3180 const size_t OPERATION_COUNT = BUF_COUNT;
3181 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3182 {
3183 bool allocate = rand.Generate() % 2 != 0;
3184 if(allocate)
3185 {
3186 if(items.size() < BUF_COUNT)
3187 {
3188 BufItem item;
3189 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003190 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003191 items.push_back(item);
3192 }
3193 }
3194 else // Free
3195 {
3196 if(!items.empty())
3197 {
3198 size_t index = (size_t)rand.Generate() % items.size();
3199 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3200 items.erase(items.begin() + index);
3201 }
3202 }
3203 }
3204 }
3205
3206 // Allocate up to maximum.
3207 while(items.size() < BUF_COUNT)
3208 {
3209 BufItem item;
3210 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003211 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003212 items.push_back(item);
3213 }
3214
3215 // Validate that no buffer is lost.
3216 for(size_t i = 0; i < items.size(); ++i)
3217 {
3218 VmaAllocationInfo allocInfo;
3219 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003220 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003221 }
3222
3223 // Next frame.
3224 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3225
3226 // Allocate another BUF_COUNT buffers.
3227 for(size_t i = 0; i < BUF_COUNT; ++i)
3228 {
3229 BufItem item;
3230 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003231 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003232 items.push_back(item);
3233 }
3234
3235 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3236 for(size_t i = 0; i < BUF_COUNT; ++i)
3237 {
3238 VmaAllocationInfo allocInfo;
3239 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003240 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003241 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3242 }
3243 items.erase(items.begin(), items.begin() + BUF_COUNT);
3244
3245 // Validate that no buffer is lost.
3246 for(size_t i = 0; i < items.size(); ++i)
3247 {
3248 VmaAllocationInfo allocInfo;
3249 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003250 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003251 }
3252
3253 // Free one item.
3254 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3255 items.pop_back();
3256
3257 // Validate statistics.
3258 {
3259 VmaPoolStats poolStats = {};
3260 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003261 TEST(poolStats.allocationCount == items.size());
3262 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3263 TEST(poolStats.unusedRangeCount == 1);
3264 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3265 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003266 }
3267
3268 // Free all remaining items.
3269 for(size_t i = items.size(); i--; )
3270 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3271 items.clear();
3272
3273 // Allocate maximum items again.
3274 for(size_t i = 0; i < BUF_COUNT; ++i)
3275 {
3276 BufItem item;
3277 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003278 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003279 items.push_back(item);
3280 }
3281
3282 // Delete every other item.
3283 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3284 {
3285 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3286 items.erase(items.begin() + i);
3287 }
3288
3289 // Defragment!
3290 {
3291 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3292 for(size_t i = 0; i < items.size(); ++i)
3293 allocationsToDefragment[i] = items[i].Alloc;
3294
3295 VmaDefragmentationStats defragmentationStats;
3296 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003297 TEST(res == VK_SUCCESS);
3298 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003299 }
3300
3301 // Free all remaining items.
3302 for(size_t i = items.size(); i--; )
3303 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3304 items.clear();
3305
3306 ////////////////////////////////////////////////////////////////////////////////
3307 // Test for vmaMakePoolAllocationsLost
3308
3309 // Allocate 4 buffers on frame 10.
3310 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3311 for(size_t i = 0; i < 4; ++i)
3312 {
3313 BufItem item;
3314 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003315 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003316 items.push_back(item);
3317 }
3318
3319 // Touch first 2 of them on frame 11.
3320 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3321 for(size_t i = 0; i < 2; ++i)
3322 {
3323 VmaAllocationInfo allocInfo;
3324 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3325 }
3326
3327 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3328 size_t lostCount = 0xDEADC0DE;
3329 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003330 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003331
3332 // Make another call. Now 0 should be lost.
3333 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003334 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003335
3336 // Make another call, with null count. Should not crash.
3337 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3338
3339 // END: Free all remaining items.
3340 for(size_t i = items.size(); i--; )
3341 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3342
3343 items.clear();
3344
Adam Sawickid2924172018-06-11 12:48:46 +02003345 ////////////////////////////////////////////////////////////////////////////////
3346 // Test for allocation too large for pool
3347
3348 {
3349 VmaAllocationCreateInfo allocCreateInfo = {};
3350 allocCreateInfo.pool = pool;
3351
3352 VkMemoryRequirements memReq;
3353 memReq.memoryTypeBits = UINT32_MAX;
3354 memReq.alignment = 1;
3355 memReq.size = poolCreateInfo.blockSize + 4;
3356
3357 VmaAllocation alloc = nullptr;
3358 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003359 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003360 }
3361
Adam Sawickib8333fb2018-03-13 16:15:53 +01003362 vmaDestroyPool(g_hAllocator, pool);
3363}
3364
Adam Sawickie44c6262018-06-15 14:30:39 +02003365static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3366{
3367 const uint8_t* pBytes = (const uint8_t*)pMemory;
3368 for(size_t i = 0; i < size; ++i)
3369 {
3370 if(pBytes[i] != pattern)
3371 {
3372 return false;
3373 }
3374 }
3375 return true;
3376}
3377
3378static void TestAllocationsInitialization()
3379{
3380 VkResult res;
3381
3382 const size_t BUF_SIZE = 1024;
3383
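// Editor's note: this test assumes the library was compiled with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS = 1, so that newly allocated memory is filled
// with the pattern 0xDC and freed memory with 0xEF - the two patterns validated
// below with ValidatePattern().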
3384 // Create pool.
3385
3386 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3387 bufInfo.size = BUF_SIZE;
3388 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3389
3390 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3391 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3392
3393 VmaPoolCreateInfo poolCreateInfo = {};
3394 poolCreateInfo.blockSize = BUF_SIZE * 10;
3395 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3396 poolCreateInfo.maxBlockCount = 1;
3397 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003398 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003399
3400 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3401 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003402 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003403
3404 // Create one persistently mapped buffer to keep the memory of this block mapped,
3405 // so that the pointer to the mapped data remains (more or less...) valid even
3406 // after the other allocations are destroyed.
3407
3408 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3409 VkBuffer firstBuf;
3410 VmaAllocation firstAlloc;
3411 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003412 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003413
3414 // Test buffers.
3415
3416 for(uint32_t i = 0; i < 2; ++i)
3417 {
3418 const bool persistentlyMapped = i == 0;
3419 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3420 VkBuffer buf;
3421 VmaAllocation alloc;
3422 VmaAllocationInfo allocInfo;
3423 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003424 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003425
3426 void* pMappedData;
3427 if(!persistentlyMapped)
3428 {
3429 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003430 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003431 }
3432 else
3433 {
3434 pMappedData = allocInfo.pMappedData;
3435 }
3436
3437 // Validate initialized content
3438 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003439 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003440
3441 if(!persistentlyMapped)
3442 {
3443 vmaUnmapMemory(g_hAllocator, alloc);
3444 }
3445
3446 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3447
3448 // Validate freed content
3449 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003450 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003451 }
3452
3453 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3454 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3455}
3456
Adam Sawickib8333fb2018-03-13 16:15:53 +01003457static void TestPool_Benchmark(
3458 PoolTestResult& outResult,
3459 const PoolTestConfig& config)
3460{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003461 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003462
3463 RandomNumberGenerator mainRand{config.RandSeed};
3464
3465 uint32_t allocationSizeProbabilitySum = std::accumulate(
3466 config.AllocationSizes.begin(),
3467 config.AllocationSizes.end(),
3468 0u,
3469 [](uint32_t sum, const AllocationSize& allocSize) {
3470 return sum + allocSize.Probability;
3471 });
3472
3473 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3474 bufferInfo.size = 256; // Whatever.
3475 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3476
3477 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3478 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3479 imageInfo.extent.width = 256; // Whatever.
3480 imageInfo.extent.height = 256; // Whatever.
3481 imageInfo.extent.depth = 1;
3482 imageInfo.mipLevels = 1;
3483 imageInfo.arrayLayers = 1;
3484 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3485 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3486 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3487 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3488 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3489
3490 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3491 {
3492 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003493 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003494 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003495
3496 VkMemoryRequirements memReq;
3497 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3498 bufferMemoryTypeBits = memReq.memoryTypeBits;
3499
Adam Sawicki1f84f622019-07-02 13:40:01 +02003500 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003501 }
3502
3503 uint32_t imageMemoryTypeBits = UINT32_MAX;
3504 {
3505 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003506 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003507 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003508
3509 VkMemoryRequirements memReq;
3510 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3511 imageMemoryTypeBits = memReq.memoryTypeBits;
3512
Adam Sawicki1f84f622019-07-02 13:40:01 +02003513 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003514 }
3515
3516 uint32_t memoryTypeBits = 0;
3517 if(config.UsesBuffers() && config.UsesImages())
3518 {
3519 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3520 if(memoryTypeBits == 0)
3521 {
3522 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3523 return;
3524 }
3525 }
3526 else if(config.UsesBuffers())
3527 memoryTypeBits = bufferMemoryTypeBits;
3528 else if(config.UsesImages())
3529 memoryTypeBits = imageMemoryTypeBits;
3530 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003531 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003532
3533 VmaPoolCreateInfo poolCreateInfo = {};
3534 poolCreateInfo.memoryTypeIndex = 0;
3535 poolCreateInfo.minBlockCount = 1;
3536 poolCreateInfo.maxBlockCount = 1;
3537 poolCreateInfo.blockSize = config.PoolSize;
3538 poolCreateInfo.frameInUseCount = 1;
3539
3540 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3541 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3542 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3543
3544 VmaPool pool;
3545 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003546 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003547
3548 // Start time measurement - after creating pool and initializing data structures.
3549 time_point timeBeg = std::chrono::high_resolution_clock::now();
3550
3551 ////////////////////////////////////////////////////////////////////////////////
3552 // ThreadProc
3553 auto ThreadProc = [&](
3554 PoolTestThreadResult* outThreadResult,
3555 uint32_t randSeed,
3556 HANDLE frameStartEvent,
3557 HANDLE frameEndEvent) -> void
3558 {
3559 RandomNumberGenerator threadRand{randSeed};
3560
3561 outThreadResult->AllocationTimeMin = duration::max();
3562 outThreadResult->AllocationTimeSum = duration::zero();
3563 outThreadResult->AllocationTimeMax = duration::min();
3564 outThreadResult->DeallocationTimeMin = duration::max();
3565 outThreadResult->DeallocationTimeSum = duration::zero();
3566 outThreadResult->DeallocationTimeMax = duration::min();
3567 outThreadResult->AllocationCount = 0;
3568 outThreadResult->DeallocationCount = 0;
3569 outThreadResult->LostAllocationCount = 0;
3570 outThreadResult->LostAllocationTotalSize = 0;
3571 outThreadResult->FailedAllocationCount = 0;
3572 outThreadResult->FailedAllocationTotalSize = 0;
3573
3574 struct Item
3575 {
3576 VkDeviceSize BufferSize;
3577 VkExtent2D ImageSize;
3578 VkBuffer Buf;
3579 VkImage Image;
3580 VmaAllocation Alloc;
3581
3582 VkDeviceSize CalcSizeBytes() const
3583 {
3584 return BufferSize +
3585 ImageSize.width * ImageSize.height * 4;
3586 }
3587 };
3588 std::vector<Item> unusedItems, usedItems;
3589
3590 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3591
3592 // Create all items - all unused, not yet allocated.
3593 for(size_t i = 0; i < threadTotalItemCount; ++i)
3594 {
3595 Item item = {};
3596
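// Pick an entry from config.AllocationSizes with probability proportional to its
// Probability field - simple roulette-wheel selection over the precomputed
// allocationSizeProbabilitySum.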
3597 uint32_t allocSizeIndex = 0;
3598 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3599 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3600 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3601
3602 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3603 if(allocSize.BufferSizeMax > 0)
3604 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003605 TEST(allocSize.BufferSizeMin > 0);
3606 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003607 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3608 item.BufferSize = allocSize.BufferSizeMin;
3609 else
3610 {
3611 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3612 item.BufferSize = item.BufferSize / 16 * 16;
3613 }
3614 }
3615 else
3616 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003617 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003618 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3619 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3620 else
3621 {
3622 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3623 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3624 }
3625 }
3626
3627 unusedItems.push_back(item);
3628 }
3629
3630 auto Allocate = [&](Item& item) -> VkResult
3631 {
3632 VmaAllocationCreateInfo allocCreateInfo = {};
3633 allocCreateInfo.pool = pool;
3634 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3635 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3636
3637 if(item.BufferSize)
3638 {
3639 bufferInfo.size = item.BufferSize;
3640 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3641 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3642 }
3643 else
3644 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003645 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003646
3647 imageInfo.extent.width = item.ImageSize.width;
3648 imageInfo.extent.height = item.ImageSize.height;
3649 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3650 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3651 }
3652 };
3653
3654 ////////////////////////////////////////////////////////////////////////////////
3655 // Frames
3656 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3657 {
3658 WaitForSingleObject(frameStartEvent, INFINITE);
3659
3660 // Always move some percentage of the used buffers back to unused, so that different buffers get used each frame.
3661 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3662 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3663 {
3664 size_t index = threadRand.Generate() % usedItems.size();
3665 unusedItems.push_back(usedItems[index]);
3666 usedItems.erase(usedItems.begin() + index);
3667 }
3668
3669 // Determine which bufs we want to use in this frame.
3670 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3671 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003672 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003673 // Move some used to unused.
3674 while(usedBufCount < usedItems.size())
3675 {
3676 size_t index = threadRand.Generate() % usedItems.size();
3677 unusedItems.push_back(usedItems[index]);
3678 usedItems.erase(usedItems.begin() + index);
3679 }
3680 // Move some unused to used.
3681 while(usedBufCount > usedItems.size())
3682 {
3683 size_t index = threadRand.Generate() % unusedItems.size();
3684 usedItems.push_back(unusedItems[index]);
3685 unusedItems.erase(unusedItems.begin() + index);
3686 }
3687
3688 uint32_t touchExistingCount = 0;
3689 uint32_t touchLostCount = 0;
3690 uint32_t createSucceededCount = 0;
3691 uint32_t createFailedCount = 0;
3692
3693 // Touch all used bufs. If not created or lost, allocate.
3694 for(size_t i = 0; i < usedItems.size(); ++i)
3695 {
3696 Item& item = usedItems[i];
3697 // Not yet created.
3698 if(item.Alloc == VK_NULL_HANDLE)
3699 {
3700 res = Allocate(item);
3701 ++outThreadResult->AllocationCount;
3702 if(res != VK_SUCCESS)
3703 {
3704 item.Alloc = VK_NULL_HANDLE;
3705 item.Buf = VK_NULL_HANDLE;
3706 ++outThreadResult->FailedAllocationCount;
3707 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3708 ++createFailedCount;
3709 }
3710 else
3711 ++createSucceededCount;
3712 }
3713 else
3714 {
3715 // Touch.
3716 VmaAllocationInfo allocInfo;
3717 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3718 // Lost.
3719 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3720 {
3721 ++touchLostCount;
3722
3723 // Destroy.
3724 {
3725 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3726 if(item.Buf)
3727 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3728 else
3729 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3730 ++outThreadResult->DeallocationCount;
3731 }
3732 item.Alloc = VK_NULL_HANDLE;
3733 item.Buf = VK_NULL_HANDLE;
3734
3735 ++outThreadResult->LostAllocationCount;
3736 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3737
3738 // Recreate.
3739 res = Allocate(item);
3740 ++outThreadResult->AllocationCount;
3741 // Creation failed.
3742 if(res != VK_SUCCESS)
3743 {
3744 ++outThreadResult->FailedAllocationCount;
3745 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3746 ++createFailedCount;
3747 }
3748 else
3749 ++createSucceededCount;
3750 }
3751 else
3752 ++touchExistingCount;
3753 }
3754 }
3755
3756 /*
3757 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3758 randSeed, frameIndex,
3759 touchExistingCount, touchLostCount,
3760 createSucceededCount, createFailedCount);
3761 */
3762
3763 SetEvent(frameEndEvent);
3764 }
3765
3766 // Free all remaining items.
3767 for(size_t i = usedItems.size(); i--; )
3768 {
3769 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3770 if(usedItems[i].Buf)
3771 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3772 else
3773 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3774 ++outThreadResult->DeallocationCount;
3775 }
3776 for(size_t i = unusedItems.size(); i--; )
3777 {
3778 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3779 if(unusedItems[i].Buf)
3780 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3781 else
3782 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3783 ++outThreadResult->DeallocationCount;
3784 }
3785 };
3786
3787 // Launch threads.
3788 uint32_t threadRandSeed = mainRand.Generate();
3789 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3790 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3791 std::vector<std::thread> bkgThreads;
3792 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3793 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3794 {
3795 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3796 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3797 bkgThreads.emplace_back(std::bind(
3798 ThreadProc,
3799 &threadResults[threadIndex],
3800 threadRandSeed + threadIndex,
3801 frameStartEvents[threadIndex],
3802 frameEndEvents[threadIndex]));
3803 }
3804
3805 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003806 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003807 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3808 {
3809 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3810 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3811 SetEvent(frameStartEvents[threadIndex]);
3812 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3813 }
3814
3815 // Wait for the threads to finish.
3816 for(size_t i = 0; i < bkgThreads.size(); ++i)
3817 {
3818 bkgThreads[i].join();
3819 CloseHandle(frameEndEvents[i]);
3820 CloseHandle(frameStartEvents[i]);
3821 }
3822 bkgThreads.clear();
3823
3824 // Finish time measurement - before destroying pool.
3825 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3826
3827 vmaDestroyPool(g_hAllocator, pool);
3828
3829 outResult.AllocationTimeMin = duration::max();
3830 outResult.AllocationTimeAvg = duration::zero();
3831 outResult.AllocationTimeMax = duration::min();
3832 outResult.DeallocationTimeMin = duration::max();
3833 outResult.DeallocationTimeAvg = duration::zero();
3834 outResult.DeallocationTimeMax = duration::min();
3835 outResult.LostAllocationCount = 0;
3836 outResult.LostAllocationTotalSize = 0;
3837 outResult.FailedAllocationCount = 0;
3838 outResult.FailedAllocationTotalSize = 0;
3839 size_t allocationCount = 0;
3840 size_t deallocationCount = 0;
3841 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3842 {
3843 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3844 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3845 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3846 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3847 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3848 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3849 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3850 allocationCount += threadResult.AllocationCount;
3851 deallocationCount += threadResult.DeallocationCount;
3852 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3853 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3854 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3855 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3856 }
3857 if(allocationCount)
3858 outResult.AllocationTimeAvg /= allocationCount;
3859 if(deallocationCount)
3860 outResult.DeallocationTimeAvg /= deallocationCount;
3861}
3862
3863static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3864{
3865 if(ptr1 < ptr2)
3866 return ptr1 + size1 > ptr2;
3867 else if(ptr2 < ptr1)
3868 return ptr2 + size2 > ptr1;
3869 else
3870 return true;
3871}
3872
Adam Sawickiefa88c42019-11-18 16:33:56 +01003873static void TestMemoryUsage()
3874{
3875 wprintf(L"Testing memory usage:\n");
3876
Adam Sawicki69185552019-11-18 17:03:34 +01003877 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01003878 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
3879 {
3880 switch(usage)
3881 {
3882 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
3883 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
3884 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
3885 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
3886 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
3887 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01003888 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01003889 default: assert(0);
3890 }
3891
3892 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
3893 {
3894 if(res == VK_SUCCESS)
3895 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
3896 else
3897 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
3898 };
3899
3900 // 1: Buffer for copy
3901 {
3902 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3903 bufCreateInfo.size = 65536;
3904 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3905
3906 VkBuffer buf = VK_NULL_HANDLE;
3907 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
3908 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
3909
3910 VkMemoryRequirements memReq = {};
3911 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
3912
3913 VmaAllocationCreateInfo allocCreateInfo = {};
3914 allocCreateInfo.usage = (VmaMemoryUsage)usage;
3915 VmaAllocation alloc = VK_NULL_HANDLE;
3916 VmaAllocationInfo allocInfo = {};
3917 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
3918 if(res == VK_SUCCESS)
3919 {
3920 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
3921 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
3922 TEST(res == VK_SUCCESS);
3923 }
3924 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
3925 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3926 }
3927
3928 // 2: Vertex buffer
3929 {
3930 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3931 bufCreateInfo.size = 65536;
3932 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
3933
3934 VkBuffer buf = VK_NULL_HANDLE;
3935 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
3936 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
3937
3938 VkMemoryRequirements memReq = {};
3939 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
3940
3941 VmaAllocationCreateInfo allocCreateInfo = {};
3942 allocCreateInfo.usage = (VmaMemoryUsage)usage;
3943 VmaAllocation alloc = VK_NULL_HANDLE;
3944 VmaAllocationInfo allocInfo = {};
3945 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
3946 if(res == VK_SUCCESS)
3947 {
3948 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
3949 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
3950 TEST(res == VK_SUCCESS);
3951 }
3952 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
3953 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3954 }
3955
3956 // 3: Image for copy, OPTIMAL
3957 {
3958 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3959 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
3960 imgCreateInfo.extent.width = 256;
3961 imgCreateInfo.extent.height = 256;
3962 imgCreateInfo.extent.depth = 1;
3963 imgCreateInfo.mipLevels = 1;
3964 imgCreateInfo.arrayLayers = 1;
3965 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3966 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
3967 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
3968 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3969 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3970
3971 VkImage img = VK_NULL_HANDLE;
3972 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
3973 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
3974
3975 VkMemoryRequirements memReq = {};
3976 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
3977
3978 VmaAllocationCreateInfo allocCreateInfo = {};
3979 allocCreateInfo.usage = (VmaMemoryUsage)usage;
3980 VmaAllocation alloc = VK_NULL_HANDLE;
3981 VmaAllocationInfo allocInfo = {};
3982 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
3983 if(res == VK_SUCCESS)
3984 {
3985 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
3986 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
3987 TEST(res == VK_SUCCESS);
3988 }
3989 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
3990
3991 vmaDestroyImage(g_hAllocator, img, alloc);
3992 }
3993
3994 // 4: Image SAMPLED, OPTIMAL
3995 {
3996 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3997 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
3998 imgCreateInfo.extent.width = 256;
3999 imgCreateInfo.extent.height = 256;
4000 imgCreateInfo.extent.depth = 1;
4001 imgCreateInfo.mipLevels = 1;
4002 imgCreateInfo.arrayLayers = 1;
4003 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4004 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4005 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4006 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4007 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4008
4009 VkImage img = VK_NULL_HANDLE;
4010 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4011 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4012
4013 VkMemoryRequirements memReq = {};
4014 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4015
4016 VmaAllocationCreateInfo allocCreateInfo = {};
4017 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4018 VmaAllocation alloc = VK_NULL_HANDLE;
4019 VmaAllocationInfo allocInfo = {};
4020 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4021 if(res == VK_SUCCESS)
4022 {
4023 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4024 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4025 TEST(res == VK_SUCCESS);
4026 }
4027 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4028 vmaDestroyImage(g_hAllocator, img, alloc);
4029 }
4030
4031 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4032 {
4033 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4034 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4035 imgCreateInfo.extent.width = 256;
4036 imgCreateInfo.extent.height = 256;
4037 imgCreateInfo.extent.depth = 1;
4038 imgCreateInfo.mipLevels = 1;
4039 imgCreateInfo.arrayLayers = 1;
4040 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4041 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4042 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4043 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4044 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4045
4046 VkImage img = VK_NULL_HANDLE;
4047 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4048 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4049
4050 VkMemoryRequirements memReq = {};
4051 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4052
4053 VmaAllocationCreateInfo allocCreateInfo = {};
4054 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4055 VmaAllocation alloc = VK_NULL_HANDLE;
4056 VmaAllocationInfo allocInfo = {};
4057 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4058 if(res == VK_SUCCESS)
4059 {
4060 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4061 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4062 TEST(res == VK_SUCCESS);
4063 }
4064 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4065 vmaDestroyImage(g_hAllocator, img, alloc);
4066 }
4067 }
4068}
4069
Adam Sawicki40ffe982019-10-11 15:56:02 +02004070static void TestBudget()
4071{
4072 wprintf(L"Testing budget...\n");
4073
Adam Sawicki353e3672019-11-02 14:12:05 +01004074 static const VkDeviceSize BUF_SIZE = 100ull * 1024 * 1024;
4075 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004076
4077 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
4078 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004079 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
4080
4081 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
4082 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004083
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01004084 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4085 {
4086 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
4087 }
4088
Adam Sawicki40ffe982019-10-11 15:56:02 +02004089 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4090 bufInfo.size = BUF_SIZE;
4091 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4092
4093 VmaAllocationCreateInfo allocCreateInfo = {};
4094 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4095 if(testIndex == 0)
4096 {
4097 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4098 }
4099
4100 // CREATE BUFFERS
4101 uint32_t heapIndex = 0;
4102 BufferInfo bufInfos[BUF_COUNT] = {};
4103 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
4104 {
4105 VmaAllocationInfo allocInfo;
4106 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4107 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
4108 TEST(res == VK_SUCCESS);
4109 if(bufIndex == 0)
4110 {
4111 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
4112 }
4113 else
4114 {
4115 // All buffers need to fall into the same heap.
4116 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
4117 }
4118 }
4119
Adam Sawicki353e3672019-11-02 14:12:05 +01004120 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
4121 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004122
4123 // DESTROY BUFFERS
4124 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
4125 {
4126 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
4127 }
4128
Adam Sawicki353e3672019-11-02 14:12:05 +01004129 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
4130 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004131
4132 // CHECK
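// Expected invariants: for the heap the buffers landed in, allocationBytes returns
// to its starting value and the snapshot taken while the buffers were alive exceeds
// it by exactly BUF_SIZE * BUF_COUNT; heaps untouched by this test should report the
// same numbers in all three snapshots.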
4133 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4134 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004135 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004136 if(i == heapIndex)
4137 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004138 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
4139 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
4140 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004141 }
4142 else
4143 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004144 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes &&
4145 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
4146 TEST(budgetEnd[i].blockBytes == budgetBeg[i].blockBytes &&
4147 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004148 }
4149 }
4150 }
4151}
4152
Adam Sawickib8333fb2018-03-13 16:15:53 +01004153static void TestMapping()
4154{
4155 wprintf(L"Testing mapping...\n");
4156
4157 VkResult res;
4158 uint32_t memTypeIndex = UINT32_MAX;
4159
4160 enum TEST
4161 {
4162 TEST_NORMAL,
4163 TEST_POOL,
4164 TEST_DEDICATED,
4165 TEST_COUNT
4166 };
4167 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4168 {
4169 VmaPool pool = nullptr;
4170 if(testIndex == TEST_POOL)
4171 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004172 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004173 VmaPoolCreateInfo poolInfo = {};
4174 poolInfo.memoryTypeIndex = memTypeIndex;
4175 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004176 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004177 }
4178
4179 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4180 bufInfo.size = 0x10000;
4181 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004182
Adam Sawickib8333fb2018-03-13 16:15:53 +01004183 VmaAllocationCreateInfo allocCreateInfo = {};
4184 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4185 allocCreateInfo.pool = pool;
4186 if(testIndex == TEST_DEDICATED)
4187 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004188
Adam Sawickib8333fb2018-03-13 16:15:53 +01004189 VmaAllocationInfo allocInfo;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004190
Adam Sawickib8333fb2018-03-13 16:15:53 +01004191 // Mapped manually
4192
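// vmaMapMemory()/vmaUnmapMemory() are reference-counted per allocation: mapping the
// same allocation a second time returns the same pointer and requires a matching
// number of unmaps. The block below exercises exactly that.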
4193 // Create 2 buffers.
4194 BufferInfo bufferInfos[3];
4195 for(size_t i = 0; i < 2; ++i)
4196 {
4197 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4198 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004199 TEST(res == VK_SUCCESS);
4200 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004201 memTypeIndex = allocInfo.memoryType;
4202 }
Adam Sawicki40ffe982019-10-11 15:56:02 +02004203
Adam Sawickib8333fb2018-03-13 16:15:53 +01004204 // Map buffer 0.
4205 char* data00 = nullptr;
4206 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004207 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004208 data00[0xFFFF] = data00[0];
4209
4210 // Map buffer 0 second time.
4211 char* data01 = nullptr;
4212 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004213 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004214
4215 // Map buffer 1.
4216 char* data1 = nullptr;
4217 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004218 TEST(res == VK_SUCCESS && data1 != nullptr);
4219 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004220 data1[0xFFFF] = data1[0];
4221
4222 // Unmap buffer 0 two times.
4223 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4224 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4225 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004226 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004227
4228 // Unmap buffer 1.
4229 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4230 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004231 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004232
4233 // Create 3rd buffer - persistently mapped.
4234 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4235 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4236 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004237 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004238
4239 // Map buffer 2.
4240 char* data2 = nullptr;
4241 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004242 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004243 data2[0xFFFF] = data2[0];
4244
4245 // Unmap buffer 2.
4246 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4247 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004248 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004249
4250 // Destroy all buffers.
4251 for(size_t i = 3; i--; )
4252 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4253
4254 vmaDestroyPool(g_hAllocator, pool);
4255 }
4256}
4257
Adam Sawickidaa6a552019-06-25 15:26:37 +02004258// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
4259static void TestDeviceLocalMapped()
4260{
4261 VkResult res;
4262
4263 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
4264 {
4265 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4266 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4267 bufCreateInfo.size = 4096;
4268
4269 VmaPool pool = VK_NULL_HANDLE;
4270 VmaAllocationCreateInfo allocCreateInfo = {};
4271 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4272 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4273 if(testIndex == 2)
4274 {
4275 VmaPoolCreateInfo poolCreateInfo = {};
4276 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4277 TEST(res == VK_SUCCESS);
4278 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4279 TEST(res == VK_SUCCESS);
4280 allocCreateInfo.pool = pool;
4281 }
4282 else if(testIndex == 1)
4283 {
4284 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4285 }
4286
4287 VkBuffer buf = VK_NULL_HANDLE;
4288 VmaAllocation alloc = VK_NULL_HANDLE;
4289 VmaAllocationInfo allocInfo = {};
4290 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4291 TEST(res == VK_SUCCESS && alloc);
4292
4293 VkMemoryPropertyFlags memTypeFlags = 0;
4294 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4295 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4296 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
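        // MAPPED_BIT can only take effect if the chosen memory type is also HOST_VISIBLE,
        // so the allocation should end up mapped exactly when that property is present.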
4297
4298 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4299 vmaDestroyPool(g_hAllocator, pool);
4300 }
4301}
4302
Adam Sawickib8333fb2018-03-13 16:15:53 +01004303static void TestMappingMultithreaded()
4304{
4305 wprintf(L"Testing mapping multithreaded...\n");
4306
4307 static const uint32_t threadCount = 16;
4308 static const uint32_t bufferCount = 1024;
4309 static const uint32_t threadBufferCount = bufferCount / threadCount;
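    // 1024 buffers spread over 16 threads = 64 buffers per thread.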
4310
4311 VkResult res;
4312 volatile uint32_t memTypeIndex = UINT32_MAX;
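    // Shared between the worker threads and the outer loop; volatile is used here only as a crude,
    // test-only way to publish the value (the first thread to allocate writes it).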
4313
4314 enum TEST
4315 {
4316 TEST_NORMAL,
4317 TEST_POOL,
4318 TEST_DEDICATED,
4319 TEST_COUNT
4320 };
4321 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4322 {
4323 VmaPool pool = nullptr;
4324 if(testIndex == TEST_POOL)
4325 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004326 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004327 VmaPoolCreateInfo poolInfo = {};
4328 poolInfo.memoryTypeIndex = memTypeIndex;
4329 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004330 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004331 }
4332
4333 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4334 bufCreateInfo.size = 0x10000;
4335 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4336
4337 VmaAllocationCreateInfo allocCreateInfo = {};
4338 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4339 allocCreateInfo.pool = pool;
4340 if(testIndex == TEST_DEDICATED)
4341 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4342
4343 std::thread threads[threadCount];
4344 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4345 {
4346 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4347 // ======== THREAD FUNCTION ========
4348
4349 RandomNumberGenerator rand{threadIndex};
4350
4351 enum class MODE
4352 {
4353 // Don't map this buffer at all.
4354 DONT_MAP,
4355 // Map and quickly unmap.
4356 MAP_FOR_MOMENT,
4357 // Map and unmap before destruction.
4358 MAP_FOR_LONGER,
4359             // Map two times. Unmap once right away; the second unmap happens just before destruction.
4360 MAP_TWO_TIMES,
4361 // Create this buffer as persistently mapped.
4362 PERSISTENTLY_MAPPED,
4363 COUNT
4364 };
4365 std::vector<BufferInfo> bufInfos{threadBufferCount};
4366 std::vector<MODE> bufModes{threadBufferCount};
4367
4368 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4369 {
4370 BufferInfo& bufInfo = bufInfos[bufferIndex];
4371 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4372 bufModes[bufferIndex] = mode;
4373
4374 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4375 if(mode == MODE::PERSISTENTLY_MAPPED)
4376 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4377
4378 VmaAllocationInfo allocInfo;
4379 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4380 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004381 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004382
4383 if(memTypeIndex == UINT32_MAX)
4384 memTypeIndex = allocInfo.memoryType;
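                // Publish the memory type of the first allocation so the TEST_POOL iteration can create its pool in the same memory type.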
4385
4386 char* data = nullptr;
4387
4388 if(mode == MODE::PERSISTENTLY_MAPPED)
4389 {
4390 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004391 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004392 }
4393 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4394 mode == MODE::MAP_TWO_TIMES)
4395 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004396 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004397 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004398 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004399
4400 if(mode == MODE::MAP_TWO_TIMES)
4401 {
4402 char* data2 = nullptr;
4403 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004404 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004405 }
4406 }
4407 else if(mode == MODE::DONT_MAP)
4408 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004409 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004410 }
4411 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004412 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004413
4414                 // Test that reading the first byte and writing the last byte of the mapped memory doesn't crash.
4415 if(data)
4416 data[0xFFFF] = data[0];
4417
4418 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4419 {
4420 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4421
4422 VmaAllocationInfo allocInfo;
4423 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4424 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004425 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004426 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004427 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004428 }
4429
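                // Random yield/sleep to vary thread interleaving between the map and unmap operations.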
4430 switch(rand.Generate() % 3)
4431 {
4432 case 0: Sleep(0); break; // Yield.
4433 case 1: Sleep(10); break; // 10 ms
4434 // default: No sleep.
4435 }
4436
4437                 // Test that reading the first byte and writing the last byte of the mapped memory doesn't crash.
4438 if(data)
4439 data[0xFFFF] = data[0];
4440 }
4441
4442 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4443 {
4444 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4445 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4446 {
4447 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4448
4449 VmaAllocationInfo allocInfo;
4450 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004451 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004452 }
4453
4454 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4455 }
4456 });
4457 }
4458
4459 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4460 threads[threadIndex].join();
4461
4462 vmaDestroyPool(g_hAllocator, pool);
4463 }
4464}
4465
4466static void WriteMainTestResultHeader(FILE* file)
4467{
4468 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004469 "Code,Time,"
4470 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004471 "Total Time (us),"
4472 "Allocation Time Min (us),"
4473 "Allocation Time Avg (us),"
4474 "Allocation Time Max (us),"
4475 "Deallocation Time Min (us),"
4476 "Deallocation Time Avg (us),"
4477 "Deallocation Time Max (us),"
4478 "Total Memory Allocated (B),"
4479 "Free Range Size Avg (B),"
4480 "Free Range Size Max (B)\n");
4481}
4482
4483static void WriteMainTestResult(
4484 FILE* file,
4485 const char* codeDescription,
4486 const char* testDescription,
4487 const Config& config, const Result& result)
4488{
4489 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4490 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4491 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4492 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4493 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4494 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4495 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4496
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004497 std::string currTime;
4498 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004499
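    // Note: %I64u below is the MSVC-specific printf conversion for unsigned 64-bit integers (this file is Windows-only).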
4500 fprintf(file,
4501 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004502 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4503 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004504 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004505 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004506 totalTimeSeconds * 1e6f,
4507 allocationTimeMinSeconds * 1e6f,
4508 allocationTimeAvgSeconds * 1e6f,
4509 allocationTimeMaxSeconds * 1e6f,
4510 deallocationTimeMinSeconds * 1e6f,
4511 deallocationTimeAvgSeconds * 1e6f,
4512 deallocationTimeMaxSeconds * 1e6f,
4513 result.TotalMemoryAllocated,
4514 result.FreeRangeSizeAvg,
4515 result.FreeRangeSizeMax);
4516}
4517
4518static void WritePoolTestResultHeader(FILE* file)
4519{
4520 fprintf(file,
4521 "Code,Test,Time,"
4522 "Config,"
4523 "Total Time (us),"
4524 "Allocation Time Min (us),"
4525 "Allocation Time Avg (us),"
4526 "Allocation Time Max (us),"
4527 "Deallocation Time Min (us),"
4528 "Deallocation Time Avg (us),"
4529 "Deallocation Time Max (us),"
4530 "Lost Allocation Count,"
4531 "Lost Allocation Total Size (B),"
4532 "Failed Allocation Count,"
4533 "Failed Allocation Total Size (B)\n");
4534}
4535
4536static void WritePoolTestResult(
4537 FILE* file,
4538 const char* codeDescription,
4539 const char* testDescription,
4540 const PoolTestConfig& config,
4541 const PoolTestResult& result)
4542{
4543 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4544 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4545 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4546 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4547 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4548 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4549 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4550
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004551 std::string currTime;
4552 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004553
4554 fprintf(file,
4555 "%s,%s,%s,"
4556 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4557 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4558 // General
4559 codeDescription,
4560 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004561 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004562 // Config
4563 config.ThreadCount,
4564 (unsigned long long)config.PoolSize,
4565 config.FrameCount,
4566 config.TotalItemCount,
4567 config.UsedItemCountMin,
4568 config.UsedItemCountMax,
4569 config.ItemsToMakeUnusedPercent,
4570 // Results
4571 totalTimeSeconds * 1e6f,
4572 allocationTimeMinSeconds * 1e6f,
4573 allocationTimeAvgSeconds * 1e6f,
4574 allocationTimeMaxSeconds * 1e6f,
4575 deallocationTimeMinSeconds * 1e6f,
4576 deallocationTimeAvgSeconds * 1e6f,
4577 deallocationTimeMaxSeconds * 1e6f,
4578 result.LostAllocationCount,
4579 result.LostAllocationTotalSize,
4580 result.FailedAllocationCount,
4581 result.FailedAllocationTotalSize);
4582}
4583
4584static void PerformCustomMainTest(FILE* file)
4585{
4586 Config config{};
4587 config.RandSeed = 65735476;
4588 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4589 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4590 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4591 config.FreeOrder = FREE_ORDER::FORWARD;
4592 config.ThreadCount = 16;
4593 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004594 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004595
4596 // Buffers
4597 //config.AllocationSizes.push_back({4, 16, 1024});
4598 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4599
4600 // Images
4601 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4602 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4603
4604 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4605 config.AdditionalOperationCount = 1024;
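    // I.e. allocate ~5% of the 4 GB budget (~205 MB) upfront, then perform 1024 additional random operations.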
4606
4607 Result result{};
4608 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004609 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004610 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4611}
4612
4613static void PerformCustomPoolTest(FILE* file)
4614{
4615 PoolTestConfig config;
4616 config.PoolSize = 100 * 1024 * 1024;
4617 config.RandSeed = 2345764;
4618 config.ThreadCount = 1;
4619 config.FrameCount = 200;
4620 config.ItemsToMakeUnusedPercent = 2;
4621
4622 AllocationSize allocSize = {};
4623 allocSize.BufferSizeMin = 1024;
4624 allocSize.BufferSizeMax = 1024 * 1024;
4625 allocSize.Probability = 1;
4626 config.AllocationSizes.push_back(allocSize);
4627
4628 allocSize.BufferSizeMin = 0;
4629 allocSize.BufferSizeMax = 0;
4630 allocSize.ImageSizeMin = 128;
4631 allocSize.ImageSizeMax = 1024;
4632 allocSize.Probability = 1;
4633 config.AllocationSizes.push_back(allocSize);
4634
4635 config.PoolSize = config.CalcAvgResourceSize() * 200;
4636 config.UsedItemCountMax = 160;
4637 config.TotalItemCount = config.UsedItemCountMax * 10;
4638 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
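    // Pool sized for ~200 average resources; the used item count oscillates between 128 and 160 out of 1600 total items.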
4639
4640 g_MemoryAliasingWarningEnabled = false;
4641 PoolTestResult result = {};
4642 TestPool_Benchmark(result, config);
4643 g_MemoryAliasingWarningEnabled = true;
4644
4645 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4646}
4647
Adam Sawickib8333fb2018-03-13 16:15:53 +01004648static void PerformMainTests(FILE* file)
4649{
4650 uint32_t repeatCount = 1;
4651 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4652
4653 Config config{};
4654 config.RandSeed = 65735476;
4655 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4656 config.FreeOrder = FREE_ORDER::FORWARD;
4657
4658 size_t threadCountCount = 1;
4659 switch(ConfigType)
4660 {
4661 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4662 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4663 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4664 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4665 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4666 default: assert(0);
4667 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004668
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004669 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004670
Adam Sawickib8333fb2018-03-13 16:15:53 +01004671 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4672 {
4673 std::string desc1;
4674
4675 switch(threadCountIndex)
4676 {
4677 case 0:
4678 desc1 += "1_thread";
4679 config.ThreadCount = 1;
4680 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4681 break;
4682 case 1:
4683 desc1 += "16_threads+0%_common";
4684 config.ThreadCount = 16;
4685 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4686 break;
4687 case 2:
4688 desc1 += "16_threads+50%_common";
4689 config.ThreadCount = 16;
4690 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4691 break;
4692 case 3:
4693 desc1 += "16_threads+100%_common";
4694 config.ThreadCount = 16;
4695 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4696 break;
4697 case 4:
4698 desc1 += "2_threads+0%_common";
4699 config.ThreadCount = 2;
4700 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4701 break;
4702 case 5:
4703 desc1 += "2_threads+50%_common";
4704 config.ThreadCount = 2;
4705 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4706 break;
4707 case 6:
4708 desc1 += "2_threads+100%_common";
4709 config.ThreadCount = 2;
4710 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4711 break;
4712 default:
4713 assert(0);
4714 }
4715
4716 // 0 = buffers, 1 = images, 2 = buffers and images
4717 size_t buffersVsImagesCount = 2;
4718 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4719 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4720 {
4721 std::string desc2 = desc1;
4722 switch(buffersVsImagesIndex)
4723 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004724 case 0: desc2 += ",Buffers"; break;
4725 case 1: desc2 += ",Images"; break;
4726 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004727 default: assert(0);
4728 }
4729
4730 // 0 = small, 1 = large, 2 = small and large
4731 size_t smallVsLargeCount = 2;
4732 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4733 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4734 {
4735 std::string desc3 = desc2;
4736 switch(smallVsLargeIndex)
4737 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004738 case 0: desc3 += ",Small"; break;
4739 case 1: desc3 += ",Large"; break;
4740 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004741 default: assert(0);
4742 }
4743
4744 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4745 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4746 else
4747 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
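                // Runs that include large allocations get a 4 GB budget; small-only runs are capped at 4 MB.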
4748
4749 // 0 = varying sizes min...max, 1 = set of constant sizes
4750 size_t constantSizesCount = 1;
4751 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4752 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4753 {
4754 std::string desc4 = desc3;
4755 switch(constantSizesIndex)
4756 {
4757 case 0: desc4 += " Varying_sizes"; break;
4758 case 1: desc4 += " Constant_sizes"; break;
4759 default: assert(0);
4760 }
4761
4762 config.AllocationSizes.clear();
4763 // Buffers present
4764 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4765 {
4766 // Small
4767 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4768 {
4769 // Varying size
4770 if(constantSizesIndex == 0)
4771 config.AllocationSizes.push_back({4, 16, 1024});
4772 // Constant sizes
4773 else
4774 {
4775 config.AllocationSizes.push_back({1, 16, 16});
4776 config.AllocationSizes.push_back({1, 64, 64});
4777 config.AllocationSizes.push_back({1, 256, 256});
4778 config.AllocationSizes.push_back({1, 1024, 1024});
4779 }
4780 }
4781 // Large
4782 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4783 {
4784 // Varying size
4785 if(constantSizesIndex == 0)
4786 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4787 // Constant sizes
4788 else
4789 {
4790 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4791 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4792 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4793 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4794 }
4795 }
4796 }
4797 // Images present
4798 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4799 {
4800 // Small
4801 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4802 {
4803 // Varying size
4804 if(constantSizesIndex == 0)
4805 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4806 // Constant sizes
4807 else
4808 {
4809 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4810 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4811 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4812 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4813 }
4814 }
4815 // Large
4816 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4817 {
4818 // Varying size
4819 if(constantSizesIndex == 0)
4820 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4821 // Constant sizes
4822 else
4823 {
4824 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4825 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4826 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4827 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4828 }
4829 }
4830 }
4831
4832                 // 0 = allocate 100% upfront with additional_operations = 0; 1 = 50%, 2 = 5%, 3 = 95%, each with many additional operations
4833 size_t beginBytesToAllocateCount = 1;
4834 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4835 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4836 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4837 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4838 {
4839 std::string desc5 = desc4;
4840
4841 switch(beginBytesToAllocateIndex)
4842 {
4843 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004844 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004845 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4846 config.AdditionalOperationCount = 0;
4847 break;
4848 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004849 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004850 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4851 config.AdditionalOperationCount = 1024;
4852 break;
4853 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004854 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004855 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4856 config.AdditionalOperationCount = 1024;
4857 break;
4858 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004859 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004860 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4861 config.AdditionalOperationCount = 1024;
4862 break;
4863 default:
4864 assert(0);
4865 }
4866
Adam Sawicki0667e332018-08-24 17:26:44 +02004867 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004868 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004869 std::string desc6 = desc5;
4870 switch(strategyIndex)
4871 {
4872 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004873 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004874 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4875 break;
4876 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004877 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004878 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4879 break;
4880 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004881 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004882 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4883 break;
4884 default:
4885 assert(0);
4886 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004887
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004888 desc6 += ',';
4889 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004890
4891 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004892
4893 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4894 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004895 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004896
4897 Result result{};
4898 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004899 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004900 if(file)
4901 {
4902 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4903 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004904 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004905 }
4906 }
4907 }
4908 }
4909 }
4910 }
4911}
4912
4913static void PerformPoolTests(FILE* file)
4914{
4915 const size_t AVG_RESOURCES_PER_POOL = 300;
4916
4917 uint32_t repeatCount = 1;
4918 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4919
4920 PoolTestConfig config{};
4921 config.RandSeed = 2346343;
4922 config.FrameCount = 200;
4923 config.ItemsToMakeUnusedPercent = 2;
4924
4925 size_t threadCountCount = 1;
4926 switch(ConfigType)
4927 {
4928 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4929 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4930 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4931 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4932 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4933 default: assert(0);
4934 }
4935 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4936 {
4937 std::string desc1;
4938
4939 switch(threadCountIndex)
4940 {
4941 case 0:
4942 desc1 += "1_thread";
4943 config.ThreadCount = 1;
4944 break;
4945 case 1:
4946 desc1 += "16_threads";
4947 config.ThreadCount = 16;
4948 break;
4949 case 2:
4950 desc1 += "2_threads";
4951 config.ThreadCount = 2;
4952 break;
4953 default:
4954 assert(0);
4955 }
4956
4957 // 0 = buffers, 1 = images, 2 = buffers and images
4958 size_t buffersVsImagesCount = 2;
4959 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4960 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4961 {
4962 std::string desc2 = desc1;
4963 switch(buffersVsImagesIndex)
4964 {
4965 case 0: desc2 += " Buffers"; break;
4966 case 1: desc2 += " Images"; break;
4967 case 2: desc2 += " Buffers+Images"; break;
4968 default: assert(0);
4969 }
4970
4971 // 0 = small, 1 = large, 2 = small and large
4972 size_t smallVsLargeCount = 2;
4973 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4974 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4975 {
4976 std::string desc3 = desc2;
4977 switch(smallVsLargeIndex)
4978 {
4979 case 0: desc3 += " Small"; break;
4980 case 1: desc3 += " Large"; break;
4981 case 2: desc3 += " Small+Large"; break;
4982 default: assert(0);
4983 }
4984
4985 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4986 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4987 else
4988 config.PoolSize = 4ull * 1024 * 1024;
4989
4990 // 0 = varying sizes min...max, 1 = set of constant sizes
4991 size_t constantSizesCount = 1;
4992 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4993 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4994 {
4995 std::string desc4 = desc3;
4996 switch(constantSizesIndex)
4997 {
4998 case 0: desc4 += " Varying_sizes"; break;
4999 case 1: desc4 += " Constant_sizes"; break;
5000 default: assert(0);
5001 }
5002
5003 config.AllocationSizes.clear();
5004 // Buffers present
5005 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5006 {
5007 // Small
5008 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5009 {
5010 // Varying size
5011 if(constantSizesIndex == 0)
5012 config.AllocationSizes.push_back({4, 16, 1024});
5013 // Constant sizes
5014 else
5015 {
5016 config.AllocationSizes.push_back({1, 16, 16});
5017 config.AllocationSizes.push_back({1, 64, 64});
5018 config.AllocationSizes.push_back({1, 256, 256});
5019 config.AllocationSizes.push_back({1, 1024, 1024});
5020 }
5021 }
5022 // Large
5023 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5024 {
5025 // Varying size
5026 if(constantSizesIndex == 0)
5027 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5028 // Constant sizes
5029 else
5030 {
5031 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5032 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5033 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5034 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5035 }
5036 }
5037 }
5038 // Images present
5039 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5040 {
5041 // Small
5042 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5043 {
5044 // Varying size
5045 if(constantSizesIndex == 0)
5046 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5047 // Constant sizes
5048 else
5049 {
5050 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5051 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5052 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5053 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5054 }
5055 }
5056 // Large
5057 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5058 {
5059 // Varying size
5060 if(constantSizesIndex == 0)
5061 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5062 // Constant sizes
5063 else
5064 {
5065 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
5066 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
5067 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
5068 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
5069 }
5070 }
5071 }
5072
5073 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
5074 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
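                    // Size the pool to hold AVG_RESOURCES_PER_POOL (300) resources of average size;
                    // the subscription percentages below are relative to this capacity.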
5075
5076 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
5077 size_t subscriptionModeCount;
5078 switch(ConfigType)
5079 {
5080 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
5081 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
5082 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
5083 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
5084 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
5085 default: assert(0);
5086 }
5087 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
5088 {
5089 std::string desc5 = desc4;
5090
5091 switch(subscriptionModeIndex)
5092 {
5093 case 0:
5094 desc5 += " Subscription_66%";
5095 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
5096 break;
5097 case 1:
5098 desc5 += " Subscription_133%";
5099 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
5100 break;
5101 case 2:
5102 desc5 += " Subscription_100%";
5103 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
5104 break;
5105 case 3:
5106 desc5 += " Subscription_33%";
5107 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
5108 break;
5109 case 4:
5110 desc5 += " Subscription_166%";
5111 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
5112 break;
5113 default:
5114 assert(0);
5115 }
5116
5117 config.TotalItemCount = config.UsedItemCountMax * 5;
5118 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5119
5120 const char* testDescription = desc5.c_str();
5121
5122 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
5123 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005124 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005125
5126 PoolTestResult result{};
5127 g_MemoryAliasingWarningEnabled = false;
5128 TestPool_Benchmark(result, config);
5129 g_MemoryAliasingWarningEnabled = true;
5130 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
5131 }
5132 }
5133 }
5134 }
5135 }
5136 }
5137}
5138
Adam Sawickia83793a2018-09-03 13:40:42 +02005139static void BasicTestBuddyAllocator()
5140{
5141 wprintf(L"Basic test buddy allocator\n");
5142
5143 RandomNumberGenerator rand{76543};
5144
5145 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5146 sampleBufCreateInfo.size = 1024; // Size is irrelevant here - used only to find a suitable memory type.
5147 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5148
5149 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5150 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5151
5152 VmaPoolCreateInfo poolCreateInfo = {};
5153 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005154 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005155
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02005156 // Deliberately adding 1023 to test usable size smaller than memory block size.
5157 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02005158 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02005159 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02005160
5161 VmaPool pool = nullptr;
5162 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005163 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005164
5165 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
5166
5167 VmaAllocationCreateInfo allocCreateInfo = {};
5168 allocCreateInfo.pool = pool;
5169
5170 std::vector<BufferInfo> bufInfo;
5171 BufferInfo newBufInfo;
5172 VmaAllocationInfo allocInfo;
5173
5174 bufCreateInfo.size = 1024 * 256;
5175 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5176 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005177 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005178 bufInfo.push_back(newBufInfo);
5179
5180 bufCreateInfo.size = 1024 * 512;
5181 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5182 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005183 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005184 bufInfo.push_back(newBufInfo);
5185
5186 bufCreateInfo.size = 1024 * 128;
5187 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5188 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005189 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005190 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02005191
5192 // Test a very small allocation, smaller than the minimum node size.
5193 bufCreateInfo.size = 1;
5194 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5195 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005196 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02005197 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02005198
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005199 // Test a small allocation with an alignment requirement.
5200 {
5201 VkMemoryRequirements memReq;
5202 memReq.alignment = 256;
5203 memReq.memoryTypeBits = UINT32_MAX;
5204 memReq.size = 32;
5205
5206 newBufInfo.Buffer = VK_NULL_HANDLE;
5207 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5208 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005209 TEST(res == VK_SUCCESS);
5210 TEST(allocInfo.offset % memReq.alignment == 0);
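        // The buddy allocator works with power-of-two node sizes, so the returned offset is expected to satisfy the requested 256-byte alignment (checked above).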
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005211 bufInfo.push_back(newBufInfo);
5212 }
5213
5214 //SaveAllocatorStatsToFile(L"TEST.json");
5215
Adam Sawicki21017c62018-09-07 15:26:59 +02005216 VmaPoolStats stats = {};
5217 vmaGetPoolStats(g_hAllocator, pool, &stats);
5218 int DBG = 0; // Set breakpoint here to inspect `stats`.
5219
Adam Sawicki80927152018-09-07 17:27:23 +02005220 // Allocate enough new buffers to be sure to spill into a second block.
5221 for(uint32_t i = 0; i < 32; ++i)
5222 {
5223 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5224 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5225 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005226 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005227 bufInfo.push_back(newBufInfo);
5228 }
5229
5230 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5231
Adam Sawickia83793a2018-09-03 13:40:42 +02005232 // Destroy the buffers in random order.
5233 while(!bufInfo.empty())
5234 {
5235 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5236 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5237 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5238 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5239 }
5240
5241 vmaDestroyPool(g_hAllocator, pool);
5242}
5243
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005244static void BasicTestAllocatePages()
5245{
5246 wprintf(L"Basic test allocate pages\n");
5247
5248 RandomNumberGenerator rand{765461};
5249
5250 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5251 sampleBufCreateInfo.size = 1024; // Whatever.
5252 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5253
5254 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5255 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5256
5257 VmaPoolCreateInfo poolCreateInfo = {};
5258 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005259 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005260
5261 // 1 block of 1 MB.
5262 poolCreateInfo.blockSize = 1024 * 1024;
5263 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5264
5265 // Create pool.
5266 VmaPool pool = nullptr;
5267 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005268 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005269
5270 // Make 100 allocations of 4 KB - they should fit into the pool.
5271 VkMemoryRequirements memReq;
5272 memReq.memoryTypeBits = UINT32_MAX;
5273 memReq.alignment = 4 * 1024;
5274 memReq.size = 4 * 1024;
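    // 100 allocations * 4 KB = 400 KB, well within the single 1 MB block.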
5275
5276 VmaAllocationCreateInfo allocCreateInfo = {};
5277 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5278 allocCreateInfo.pool = pool;
5279
5280 constexpr uint32_t allocCount = 100;
5281
5282 std::vector<VmaAllocation> alloc{allocCount};
5283 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5284 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005285 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005286 for(uint32_t i = 0; i < allocCount; ++i)
5287 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005288 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005289 allocInfo[i].pMappedData != nullptr &&
5290 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5291 allocInfo[i].memoryType == allocInfo[0].memoryType);
5292 }
5293
5294 // Free the allocations.
5295 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5296 std::fill(alloc.begin(), alloc.end(), nullptr);
5297 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5298
5299 // Try to make 100 allocations of 100 KB each. This call should fail because the pool doesn't have enough memory.
5300 // Also test optional allocationInfo = null.
5301 memReq.size = 100 * 1024;
5302 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005303 TEST(res != VK_SUCCESS);
5304 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
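    // On failure vmaAllocateMemoryPages rolls back - no allocation may be left behind, so all output handles must still be null (checked above).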
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005305
5306 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5307 memReq.size = 4 * 1024;
5308 memReq.alignment = 128 * 1024;
5309 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005310 TEST(res != VK_SUCCESS);
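    // With 128 KB alignment at most 8 such allocations fit into the 1 MB block, so requesting 100 must fail.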
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005311
5312 // Make 100 dedicated allocations of 4 KB.
5313 memReq.alignment = 4 * 1024;
5314 memReq.size = 4 * 1024;
5315
5316 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5317 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5318 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5319 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005320 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005321 for(uint32_t i = 0; i < allocCount; ++i)
5322 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005323 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005324 allocInfo[i].pMappedData != nullptr &&
5325 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5326 allocInfo[i].offset == 0);
5327 if(i > 0)
5328 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005329 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005330 }
5331 }
5332
5333 // Free the allocations.
5334 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5335 std::fill(alloc.begin(), alloc.end(), nullptr);
5336 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5337
5338 vmaDestroyPool(g_hAllocator, pool);
5339}
5340
Adam Sawickif2975342018-10-16 13:49:02 +02005341// Test the testing environment.
5342static void TestGpuData()
5343{
5344 RandomNumberGenerator rand = { 53434 };
5345
5346 std::vector<AllocInfo> allocInfo;
5347
5348 for(size_t i = 0; i < 100; ++i)
5349 {
5350 AllocInfo info = {};
5351
5352 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5353 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5354 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5355 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5356 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
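        // Random buffer size between 1 MB and 9 MB.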
5357
5358 VmaAllocationCreateInfo allocCreateInfo = {};
5359 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5360
5361 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5362 TEST(res == VK_SUCCESS);
5363
5364 info.m_StartValue = rand.Generate();
5365
5366 allocInfo.push_back(std::move(info));
5367 }
5368
5369 UploadGpuData(allocInfo.data(), allocInfo.size());
5370
5371 ValidateGpuData(allocInfo.data(), allocInfo.size());
5372
5373 DestroyAllAllocations(allocInfo);
5374}
5375
Adam Sawickib8333fb2018-03-13 16:15:53 +01005376void Test()
5377{
5378 wprintf(L"TESTING:\n");
5379
Adam Sawicki48b8a332019-11-02 15:24:33 +01005380 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005381 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005382 ////////////////////////////////////////////////////////////////////////////////
5383 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005384 return;
5385 }
5386
Adam Sawickib8333fb2018-03-13 16:15:53 +01005387 // # Simple tests
5388
5389 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005390 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005391#if VMA_DEBUG_MARGIN
5392 TestDebugMargin();
5393#else
5394 TestPool_SameSize();
5395 TestHeapSizeLimit();
5396#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005397#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5398 TestAllocationsInitialization();
5399#endif
Adam Sawickiefa88c42019-11-18 16:33:56 +01005400 TestMemoryUsage();
Adam Sawicki40ffe982019-10-11 15:56:02 +02005401 TestBudget();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005402 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02005403 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005404 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005405 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005406 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005407 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005408
Adam Sawicki4338f662018-09-07 14:12:37 +02005409 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005410 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005411
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005412 {
5413 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005414 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005415 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005416 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005417 fclose(file);
5418 }
5419
Adam Sawickib8333fb2018-03-13 16:15:53 +01005420 TestDefragmentationSimple();
5421 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005422 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005423 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005424
5425 // # Detailed tests
5426 FILE* file;
5427 fopen_s(&file, "Results.csv", "w");
5428 assert(file != NULL);
5429
5430 WriteMainTestResultHeader(file);
5431 PerformMainTests(file);
5432 //PerformCustomMainTest(file);
5433
5434 WritePoolTestResultHeader(file);
5435 PerformPoolTests(file);
5436 //PerformCustomPoolTest(file);
5437
5438 fclose(file);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005439
Adam Sawickib8333fb2018-03-13 16:15:53 +01005440 wprintf(L"Done.\n");
5441}
5442
Adam Sawickif1a793c2018-03-13 15:42:22 +01005443#endif // #ifdef _WIN32