blob: bf2a317733d2753d2ef2b5e61664cfa5abc97611 [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
Adam Sawicki50882502020-02-07 16:51:31 +01002// Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
Adam Sawickiae5c4662019-01-02 10:23:35 +01003//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawicki94ce3d72019-04-17 14:59:25 +020029#include <functional>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
31#ifdef _WIN32
32
// Arbitrary description string used by the tests (its consumer is not visible in this chunk).
static const char* CODE_DESCRIPTION = "Foo";

// Shared command buffer and allocation callbacks, defined in another translation unit.
extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
// Begin/end recording of g_hTemporaryCommandBuffer (defined elsewhere in the test app).
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

// Fallback when the VMA configuration does not define a debug margin.
#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif
43
// Size/intensity presets for the tests. Larger presets run more cases
// (see GetAllocationStrategyCount, which scales with this).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

// Active preset. Switch to the commented-out line for a heavier run.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020055
// Order in which remaining allocations are freed at the end of a test.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Printable names matching the FREE_ORDER values (COUNT excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
63
Adam Sawicki80927152018-09-07 17:27:23 +020064// Copy of internal VmaAlgorithmToStr.
65static const char* AlgorithmToStr(uint32_t algorithm)
66{
67 switch(algorithm)
68 {
69 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
70 return "Linear";
71 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
72 return "Buddy";
73 case 0:
74 return "Default";
75 default:
76 assert(0);
77 return "";
78 }
79}
80
// One probability-weighted entry describing the size range of a resource.
// Exactly one of the buffer/image ranges is used; the other is left as 0
// (MainTest asserts ImageSizeMax == 0 when BufferSizeMax > 0).
struct AllocationSize
{
    // Relative weight of picking this entry when randomizing sizes.
    uint32_t Probability;
    // Buffer size range in bytes; 0 means this entry describes an image.
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    // Image width/height range in pixels; 0 means this entry describes a buffer.
    uint32_t ImageSizeMin, ImageSizeMax;
};
87
// Input parameters for MainTest.
struct Config
{
    // Seed of the main RNG; per-thread seeds are derived from it.
    uint32_t RandSeed;
    // Bytes allocated in the initial phase (split evenly across threads).
    VkDeviceSize BeginBytesToAllocate;
    // Number of random alloc/free operations after the initial phase (split across threads).
    uint32_t AdditionalOperationCount;
    // Upper bound on bytes held at any time (split evenly across threads).
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    // Probability-weighted buffer/image size ranges to pick from.
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    // Percent chance [0..100) that an allocation goes to the shared cross-thread list.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
101
// Aggregated output of MainTest: wall-clock total, per-operation timing
// extremes/averages, and allocator memory statistics.
struct Result
{
    duration TotalTime;
    // Avg fields are accumulated as sums and divided by the operation count at the end.
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // used + unused bytes reported by vmaCalculateStats after the allocation phase.
    VkDeviceSize TotalMemoryAllocated;
    // Free-range statistics from the same vmaCalculateStats sample.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

// Forward declarations.
void TestDefragmentationSimple();
void TestDefragmentationFull();
113
114struct PoolTestConfig
115{
116 uint32_t RandSeed;
117 uint32_t ThreadCount;
118 VkDeviceSize PoolSize;
119 uint32_t FrameCount;
120 uint32_t TotalItemCount;
121 // Range for number of items used in each frame.
122 uint32_t UsedItemCountMin, UsedItemCountMax;
123 // Percent of items to make unused, and possibly make some others used in each frame.
124 uint32_t ItemsToMakeUnusedPercent;
125 std::vector<AllocationSize> AllocationSizes;
126
127 VkDeviceSize CalcAvgResourceSize() const
128 {
129 uint32_t probabilitySum = 0;
130 VkDeviceSize sizeSum = 0;
131 for(size_t i = 0; i < AllocationSizes.size(); ++i)
132 {
133 const AllocationSize& allocSize = AllocationSizes[i];
134 if(allocSize.BufferSizeMax > 0)
135 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
136 else
137 {
138 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
139 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
140 }
141 probabilitySum += allocSize.Probability;
142 }
143 return sizeSum / probabilitySum;
144 }
145
146 bool UsesBuffers() const
147 {
148 for(size_t i = 0; i < AllocationSizes.size(); ++i)
149 if(AllocationSizes[i].BufferSizeMax > 0)
150 return true;
151 return false;
152 }
153
154 bool UsesImages() const
155 {
156 for(size_t i = 0; i < AllocationSizes.size(); ++i)
157 if(AllocationSizes[i].ImageSizeMax > 0)
158 return true;
159 return false;
160 }
161};
162
// Aggregated output of a pool test run (inputs come from PoolTestConfig).
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // Allocations that became lost, and their total size in bytes.
    size_t LostAllocationCount, LostAllocationTotalSize;
    // Allocations that failed outright, and their total size in bytes.
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
171
// Bytes per pixel assumed when estimating image memory cost in size budgets
// (see the byte-budget checks in MainTest's ThreadProc).
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame index shared by tests (its consumers are not visible in this chunk).
uint32_t g_FrameIndex = 0;

// A buffer handle paired with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
181
Adam Sawicki40ffe982019-10-11 15:56:02 +0200182static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
183{
184 const VkPhysicalDeviceMemoryProperties* props;
185 vmaGetMemoryProperties(g_hAllocator, &props);
186 return props->memoryTypes[memoryTypeIndex].heapIndex;
187}
188
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200189static uint32_t GetAllocationStrategyCount()
190{
191 uint32_t strategyCount = 0;
192 switch(ConfigType)
193 {
194 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
195 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
196 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
197 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
198 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
199 default: assert(0);
200 }
201 return strategyCount;
202}
203
204static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
205{
206 switch(allocStrategy)
207 {
208 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
209 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
210 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
211 case 0: return "Default"; break;
212 default: assert(0); return "";
213 }
214}
215
Adam Sawickib8333fb2018-03-13 16:15:53 +0100216static void InitResult(Result& outResult)
217{
218 outResult.TotalTime = duration::zero();
219 outResult.AllocationTimeMin = duration::max();
220 outResult.AllocationTimeAvg = duration::zero();
221 outResult.AllocationTimeMax = duration::min();
222 outResult.DeallocationTimeMin = duration::max();
223 outResult.DeallocationTimeAvg = duration::zero();
224 outResult.DeallocationTimeMax = duration::min();
225 outResult.TotalMemoryAllocated = 0;
226 outResult.FreeRangeSizeAvg = 0;
227 outResult.FreeRangeSizeMax = 0;
228}
229
230class TimeRegisterObj
231{
232public:
233 TimeRegisterObj(duration& min, duration& sum, duration& max) :
234 m_Min(min),
235 m_Sum(sum),
236 m_Max(max),
237 m_TimeBeg(std::chrono::high_resolution_clock::now())
238 {
239 }
240
241 ~TimeRegisterObj()
242 {
243 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
244 m_Sum += d;
245 if(d < m_Min) m_Min = d;
246 if(d > m_Max) m_Max = d;
247 }
248
249private:
250 duration& m_Min;
251 duration& m_Sum;
252 duration& m_Max;
253 time_point m_TimeBeg;
254};
255
// Per-thread partial results of a pool test (sums, not averages).
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
264
// RAII scope timer that records its lifetime into the allocation-timing
// fields of a Result (AllocationTimeAvg is used as a running sum here).
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
273
// RAII scope timer that records its lifetime into the deallocation-timing
// fields of a Result (DeallocationTimeAvg is used as a running sum here).
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
282
// RAII scope timer that records its lifetime into the allocation-timing
// fields of a PoolTestThreadResult.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
291
// RAII scope timer that records its lifetime into the deallocation-timing
// fields of a PoolTestThreadResult.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
300
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200301static void CurrentTimeToStr(std::string& out)
302{
303 time_t rawTime; time(&rawTime);
304 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
305 char timeStr[128];
306 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
307 out = timeStr;
308}
309
// Runs the main multithreaded stress test described by `config`:
// - Spawns config.ThreadCount worker threads, each seeded from the main RNG.
// - Each thread first allocates random buffers/images up to its share of
//   config.BeginBytesToAllocate, then performs its share of
//   config.AdditionalOperationCount random alloc/free operations, never
//   exceeding its share of config.MaxBytesToAllocate.
// - After all threads reach that point, memory statistics are sampled into
//   outResult, a Win32 event releases the threads, and everything is freed
//   in config.FreeOrder while deallocation times are recorded.
// Returns the last VkResult produced by an allocation (VK_SUCCESS if all succeeded).
//
// NOTE(review): worker threads update outResult's timing fields (via the
// TimeRegisterObj helpers) and, for FREE_ORDER::RANDOM, call the shared
// mainRand.Generate() without synchronization — this looks like a data race;
// confirm thread-safety or switch the random free order to threadRand.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    // Total weight for picking one of the 4 VMA_MEMORY_USAGE_* values.
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    // Total weight for picking one of config.AllocationSizes.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads; guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent non-zero)
    // with a randomly chosen memory usage, records allocation time into
    // outResult, and appends the result either to the shared list or to the
    // caller's thread-local list. Asserts via TEST on failure.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer/image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted random pick of the memory usage index.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images use optimal tiling; host-accessible ones use linear.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Usage flags vary with memory usage (and randomly for GPU-only images).
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            // Randomly publish the allocation to the shared list.
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    // Weighted random pick from config.AllocationSizes: outputs either a
    // buffer size (rounded down to a multiple of 16) or an image extent.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event, initially unsignaled; signaled by the main thread
    // once statistics are sampled, releasing workers into their free phase.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global byte/operation budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                // Allocate only if it keeps this thread under its byte budget.
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation from either the shared or the local list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Only free if this thread's byte counter can absorb it
                        // (the shared allocation may have been made by another thread).
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Hold allocations until the main thread has sampled memory statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared across threads with no lock — see header note.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0); // busy-wait, yielding the remainder of the time slice

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Convert accumulated sums into averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
676
Adam Sawicki51fa9662018-10-03 13:44:29 +0200677void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100678{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100679 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100680 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200681 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100682 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200683 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100684}
685
// A buffer or image created by the tests, together with its VMA allocation
// and the creation parameters needed to recreate or validate it.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    VkImageLayout m_ImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    // First value of the incrementing uint32_t pattern written into the resource's data.
    uint32_t m_StartValue = 0;
    // Which member is valid depends on whether m_Buffer or m_Image is set.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // After defragmentation.
    VkBuffer m_NewBuffer = VK_NULL_HANDLE;
    VkImage m_NewImage = VK_NULL_HANDLE;

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void CreateImage(
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VkImageLayout layout);
    // Destroys the owned buffer/image and frees the allocation, nulling handles.
    void Destroy();
};
712
// Creates the buffer with vmaCreateBuffer and remembers its create info in
// m_BufferInfo. Asserts via TEST on failure.
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
// Creates the image with vmaCreateImage, remembering its create info in
// m_ImageInfo and storing `layout` in m_ImageLayout for later reference.
// Asserts via TEST on failure.
void AllocInfo::CreateImage(
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VkImageLayout layout)
{
    m_ImageInfo = imageCreateInfo;
    m_ImageLayout = layout;
    VkResult res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &m_Image, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200731
732void AllocInfo::Destroy()
733{
734 if(m_Image)
735 {
Adam Sawickic467e282019-12-23 16:38:31 +0100736 assert(!m_Buffer);
Adam Sawicki1f84f622019-07-02 13:40:01 +0200737 vkDestroyImage(g_hDevice, m_Image, g_Allocs);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100738 m_Image = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200739 }
740 if(m_Buffer)
741 {
Adam Sawickic467e282019-12-23 16:38:31 +0100742 assert(!m_Image);
Adam Sawicki1f84f622019-07-02 13:40:01 +0200743 vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100744 m_Buffer = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200745 }
746 if(m_Allocation)
747 {
748 vmaFreeMemory(g_hAllocator, m_Allocation);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100749 m_Allocation = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200750 }
751}
752
// Pool of persistently mapped CPU-side staging buffers, reused across
// uploads. The combined size of all buffers is capped at MAX_TOTAL_SIZE.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks all buffers as unused again; does not free their memory.
    void ReleaseAllBuffers();

private:
    // Hard cap (256 MiB) on the combined size of all staging buffers.
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    // One cached staging buffer together with its allocation and mapping.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
776
777StagingBufferCollection::~StagingBufferCollection()
778{
779 for(size_t i = m_Bufs.size(); i--; )
780 {
781 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
782 }
783}
784
// Hands out a mapped staging buffer of at least `size` bytes, preferring the
// smallest unused cached buffer that fits; otherwise creates a new one while
// the MAX_TOTAL_SIZE budget allows. If the budget is exhausted but unused
// buffers exist, frees all unused buffers and retries once recursively.
// Returns false only when the budget is exhausted and everything is in use.
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // CPU-only memory, persistently mapped so MappedPtr stays valid.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Iterate backwards so erase() does not shift indices still to be visited.
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Retry with the freed budget; recursion depth is at most one extra level
        // because no unused buffers remain after the loop above.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
866
867void StagingBufferCollection::ReleaseAllBuffers()
868{
869 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
870 {
871 m_Bufs[i].Used = false;
872 }
873}
874
875static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
876{
877 StagingBufferCollection stagingBufs;
878
879 bool cmdBufferStarted = false;
880 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
881 {
882 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
883 if(currAllocInfo.m_Buffer)
884 {
885 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
886
887 VkBuffer stagingBuf = VK_NULL_HANDLE;
888 void* stagingBufMappedPtr = nullptr;
889 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
890 {
891 TEST(cmdBufferStarted);
892 EndSingleTimeCommands();
893 stagingBufs.ReleaseAllBuffers();
894 cmdBufferStarted = false;
895
896 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
897 TEST(ok);
898 }
899
900 // Fill staging buffer.
901 {
902 assert(size % sizeof(uint32_t) == 0);
903 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
904 uint32_t val = currAllocInfo.m_StartValue;
905 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
906 {
907 *stagingValPtr = val;
908 ++stagingValPtr;
909 ++val;
910 }
911 }
912
913 // Issue copy command from staging buffer to destination buffer.
914 if(!cmdBufferStarted)
915 {
916 cmdBufferStarted = true;
917 BeginSingleTimeCommands();
918 }
919
920 VkBufferCopy copy = {};
921 copy.srcOffset = 0;
922 copy.dstOffset = 0;
923 copy.size = size;
924 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
925 }
926 else
927 {
Adam Sawickia52012d2019-12-23 15:28:51 +0100928 TEST(currAllocInfo.m_ImageInfo.format == VK_FORMAT_R8G8B8A8_UNORM && "Only RGBA8 images are currently supported.");
929 TEST(currAllocInfo.m_ImageInfo.mipLevels == 1 && "Only single mip images are currently supported.");
930
Adam Sawickic467e282019-12-23 16:38:31 +0100931 const VkDeviceSize size = (VkDeviceSize)currAllocInfo.m_ImageInfo.extent.width * currAllocInfo.m_ImageInfo.extent.height * sizeof(uint32_t);
Adam Sawickia52012d2019-12-23 15:28:51 +0100932
933 VkBuffer stagingBuf = VK_NULL_HANDLE;
934 void* stagingBufMappedPtr = nullptr;
935 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
936 {
937 TEST(cmdBufferStarted);
938 EndSingleTimeCommands();
939 stagingBufs.ReleaseAllBuffers();
940 cmdBufferStarted = false;
941
942 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
943 TEST(ok);
944 }
945
946 // Fill staging buffer.
947 {
948 assert(size % sizeof(uint32_t) == 0);
949 uint32_t *stagingValPtr = (uint32_t *)stagingBufMappedPtr;
950 uint32_t val = currAllocInfo.m_StartValue;
951 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
952 {
953 *stagingValPtr = val;
954 ++stagingValPtr;
955 ++val;
956 }
957 }
958
959 // Issue copy command from staging buffer to destination buffer.
960 if(!cmdBufferStarted)
961 {
962 cmdBufferStarted = true;
963 BeginSingleTimeCommands();
964 }
965
966
967 // Transfer to transfer dst layout
968 VkImageSubresourceRange subresourceRange = {
969 VK_IMAGE_ASPECT_COLOR_BIT,
970 0, VK_REMAINING_MIP_LEVELS,
971 0, VK_REMAINING_ARRAY_LAYERS
972 };
973
974 VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
975 barrier.srcAccessMask = 0;
976 barrier.dstAccessMask = 0;
977 barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
978 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
979 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
980 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
981 barrier.image = currAllocInfo.m_Image;
982 barrier.subresourceRange = subresourceRange;
983
984 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
985 0, nullptr,
986 0, nullptr,
987 1, &barrier);
988
989 // Copy image date
990 VkBufferImageCopy copy = {};
991 copy.bufferOffset = 0;
992 copy.bufferRowLength = 0;
993 copy.bufferImageHeight = 0;
994 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
995 copy.imageSubresource.layerCount = 1;
996 copy.imageExtent = currAllocInfo.m_ImageInfo.extent;
997
998 vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);
999
1000 // Transfer to desired layout
1001 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1002 barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
1003 barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1004 barrier.newLayout = currAllocInfo.m_ImageLayout;
1005
1006 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
1007 0, nullptr,
1008 0, nullptr,
1009 1, &barrier);
Adam Sawickif2975342018-10-16 13:49:02 +02001010 }
1011 }
1012
1013 if(cmdBufferStarted)
1014 {
1015 EndSingleTimeCommands();
1016 stagingBufs.ReleaseAllBuffers();
1017 }
1018}
1019
1020static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
1021{
1022 StagingBufferCollection stagingBufs;
1023
1024 bool cmdBufferStarted = false;
1025 size_t validateAllocIndexOffset = 0;
1026 std::vector<void*> validateStagingBuffers;
1027 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
1028 {
1029 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
1030 if(currAllocInfo.m_Buffer)
1031 {
1032 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
1033
1034 VkBuffer stagingBuf = VK_NULL_HANDLE;
1035 void* stagingBufMappedPtr = nullptr;
1036 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
1037 {
1038 TEST(cmdBufferStarted);
1039 EndSingleTimeCommands();
1040 cmdBufferStarted = false;
1041
1042 for(size_t validateIndex = 0;
1043 validateIndex < validateStagingBuffers.size();
1044 ++validateIndex)
1045 {
1046 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1047 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1048 TEST(validateSize % sizeof(uint32_t) == 0);
1049 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1050 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1051 bool valid = true;
1052 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1053 {
1054 if(*stagingValPtr != val)
1055 {
1056 valid = false;
1057 break;
1058 }
1059 ++stagingValPtr;
1060 ++val;
1061 }
1062 TEST(valid);
1063 }
1064
1065 stagingBufs.ReleaseAllBuffers();
1066
1067 validateAllocIndexOffset = allocInfoIndex;
1068 validateStagingBuffers.clear();
1069
1070 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
1071 TEST(ok);
1072 }
1073
1074 // Issue copy command from staging buffer to destination buffer.
1075 if(!cmdBufferStarted)
1076 {
1077 cmdBufferStarted = true;
1078 BeginSingleTimeCommands();
1079 }
1080
1081 VkBufferCopy copy = {};
1082 copy.srcOffset = 0;
1083 copy.dstOffset = 0;
1084 copy.size = size;
1085 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
1086
1087 // Sava mapped pointer for later validation.
1088 validateStagingBuffers.push_back(stagingBufMappedPtr);
1089 }
1090 else
1091 {
1092 TEST(0 && "Images not currently supported.");
1093 }
1094 }
1095
1096 if(cmdBufferStarted)
1097 {
1098 EndSingleTimeCommands();
1099
1100 for(size_t validateIndex = 0;
1101 validateIndex < validateStagingBuffers.size();
1102 ++validateIndex)
1103 {
1104 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1105 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1106 TEST(validateSize % sizeof(uint32_t) == 0);
1107 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1108 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1109 bool valid = true;
1110 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1111 {
1112 if(*stagingValPtr != val)
1113 {
1114 valid = false;
1115 break;
1116 }
1117 ++stagingValPtr;
1118 ++val;
1119 }
1120 TEST(valid);
1121 }
1122
1123 stagingBufs.ReleaseAllBuffers();
1124 }
1125}
1126
Adam Sawickib8333fb2018-03-13 16:15:53 +01001127static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1128{
1129 outMemReq = {};
1130 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1131 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1132}
1133
1134static void CreateBuffer(
1135 VmaPool pool,
1136 const VkBufferCreateInfo& bufCreateInfo,
1137 bool persistentlyMapped,
1138 AllocInfo& outAllocInfo)
1139{
1140 outAllocInfo = {};
1141 outAllocInfo.m_BufferInfo = bufCreateInfo;
1142
1143 VmaAllocationCreateInfo allocCreateInfo = {};
1144 allocCreateInfo.pool = pool;
1145 if(persistentlyMapped)
1146 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1147
1148 VmaAllocationInfo vmaAllocInfo = {};
1149 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1150
1151 // Setup StartValue and fill.
1152 {
1153 outAllocInfo.m_StartValue = (uint32_t)rand();
1154 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001155 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001156 if(!persistentlyMapped)
1157 {
1158 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1159 }
1160
1161 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001162 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001163 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1164 data[i] = value++;
1165
1166 if(!persistentlyMapped)
1167 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1168 }
1169}
1170
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001171static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001172{
1173 outAllocation.m_Allocation = nullptr;
1174 outAllocation.m_Buffer = nullptr;
1175 outAllocation.m_Image = nullptr;
1176 outAllocation.m_StartValue = (uint32_t)rand();
1177
1178 VmaAllocationCreateInfo vmaMemReq;
1179 GetMemReq(vmaMemReq);
1180
1181 VmaAllocationInfo allocInfo;
1182
1183 const bool isBuffer = true;//(rand() & 0x1) != 0;
1184 const bool isLarge = (rand() % 16) == 0;
1185 if(isBuffer)
1186 {
1187 const uint32_t bufferSize = isLarge ?
1188 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1189 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1190
1191 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1192 bufferInfo.size = bufferSize;
1193 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1194
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001195 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001196 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001197 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001198 }
1199 else
1200 {
1201 const uint32_t imageSizeX = isLarge ?
1202 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1203 rand() % 1024 + 1; // 1 ... 1024
1204 const uint32_t imageSizeY = isLarge ?
1205 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1206 rand() % 1024 + 1; // 1 ... 1024
1207
1208 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1209 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1210 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1211 imageInfo.extent.width = imageSizeX;
1212 imageInfo.extent.height = imageSizeY;
1213 imageInfo.extent.depth = 1;
1214 imageInfo.mipLevels = 1;
1215 imageInfo.arrayLayers = 1;
1216 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1217 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1218 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1219 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1220
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001221 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001222 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001223 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001224 }
1225
1226 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1227 if(allocInfo.pMappedData == nullptr)
1228 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001229 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001230 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001231 }
1232
1233 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001234 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001235 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1236 data[i] = value++;
1237
1238 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001239 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001240}
1241
1242static void DestroyAllocation(const AllocInfo& allocation)
1243{
1244 if(allocation.m_Buffer)
1245 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1246 else
1247 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1248}
1249
1250static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1251{
1252 for(size_t i = allocations.size(); i--; )
1253 DestroyAllocation(allocations[i]);
1254 allocations.clear();
1255}
1256
1257static void ValidateAllocationData(const AllocInfo& allocation)
1258{
1259 VmaAllocationInfo allocInfo;
1260 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1261
1262 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1263 if(allocInfo.pMappedData == nullptr)
1264 {
1265 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001266 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001267 }
1268
1269 uint32_t value = allocation.m_StartValue;
1270 bool ok = true;
1271 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001273 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1274 {
1275 if(data[i] != value++)
1276 {
1277 ok = false;
1278 break;
1279 }
1280 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001281 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001282
1283 if(allocInfo.pMappedData == nullptr)
1284 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1285}
1286
1287static void RecreateAllocationResource(AllocInfo& allocation)
1288{
1289 VmaAllocationInfo allocInfo;
1290 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1291
1292 if(allocation.m_Buffer)
1293 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001294 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001295
Adam Sawicki1f84f622019-07-02 13:40:01 +02001296 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001297 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001298
1299 // Just to silence validation layer warnings.
1300 VkMemoryRequirements vkMemReq;
1301 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001302 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001303
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001304 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001305 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001306 }
1307 else
1308 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001309 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001310
Adam Sawicki1f84f622019-07-02 13:40:01 +02001311 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001312 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001313
1314 // Just to silence validation layer warnings.
1315 VkMemoryRequirements vkMemReq;
1316 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1317
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001318 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001319 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001320 }
1321}
1322
1323static void Defragment(AllocInfo* allocs, size_t allocCount,
1324 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1325 VmaDefragmentationStats* defragmentationStats = nullptr)
1326{
1327 std::vector<VmaAllocation> vmaAllocs(allocCount);
1328 for(size_t i = 0; i < allocCount; ++i)
1329 vmaAllocs[i] = allocs[i].m_Allocation;
1330
1331 std::vector<VkBool32> allocChanged(allocCount);
1332
1333 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1334 defragmentationInfo, defragmentationStats) );
1335
1336 for(size_t i = 0; i < allocCount; ++i)
1337 {
1338 if(allocChanged[i])
1339 {
1340 RecreateAllocationResource(allocs[i]);
1341 }
1342 }
1343}
1344
1345static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1346{
1347 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1348 ValidateAllocationData(allocInfo);
1349 });
1350}
1351
1352void TestDefragmentationSimple()
1353{
1354 wprintf(L"Test defragmentation simple\n");
1355
1356 RandomNumberGenerator rand(667);
1357
1358 const VkDeviceSize BUF_SIZE = 0x10000;
1359 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1360
1361 const VkDeviceSize MIN_BUF_SIZE = 32;
1362 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1363 auto RandomBufSize = [&]() -> VkDeviceSize {
1364 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1365 };
1366
1367 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1368 bufCreateInfo.size = BUF_SIZE;
1369 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1370
1371 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1372 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1373
1374 uint32_t memTypeIndex = UINT32_MAX;
1375 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1376
1377 VmaPoolCreateInfo poolCreateInfo = {};
1378 poolCreateInfo.blockSize = BLOCK_SIZE;
1379 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1380
1381 VmaPool pool;
1382 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1383
Adam Sawickie1681912018-11-23 17:50:12 +01001384 // Defragmentation of empty pool.
1385 {
1386 VmaDefragmentationInfo2 defragInfo = {};
1387 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1388 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1389 defragInfo.poolCount = 1;
1390 defragInfo.pPools = &pool;
1391
1392 VmaDefragmentationStats defragStats = {};
1393 VmaDefragmentationContext defragCtx = nullptr;
1394 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1395 TEST(res >= VK_SUCCESS);
1396 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1397 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1398 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1399 }
1400
Adam Sawickib8333fb2018-03-13 16:15:53 +01001401 std::vector<AllocInfo> allocations;
1402
1403 // persistentlyMappedOption = 0 - not persistently mapped.
1404 // persistentlyMappedOption = 1 - persistently mapped.
1405 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1406 {
1407 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1408 const bool persistentlyMapped = persistentlyMappedOption != 0;
1409
1410 // # Test 1
1411 // Buffers of fixed size.
1412 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1413 // Expected result: at least 1 block freed.
1414 {
1415 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1416 {
1417 AllocInfo allocInfo;
1418 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1419 allocations.push_back(allocInfo);
1420 }
1421
1422 for(size_t i = 1; i < allocations.size(); ++i)
1423 {
1424 DestroyAllocation(allocations[i]);
1425 allocations.erase(allocations.begin() + i);
1426 }
1427
1428 VmaDefragmentationStats defragStats;
1429 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001430 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1431 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001432
1433 ValidateAllocationsData(allocations.data(), allocations.size());
1434
1435 DestroyAllAllocations(allocations);
1436 }
1437
1438 // # Test 2
1439 // Buffers of fixed size.
1440 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1441 // Expected result: Each of 4 interations makes some progress.
1442 {
1443 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1444 {
1445 AllocInfo allocInfo;
1446 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1447 allocations.push_back(allocInfo);
1448 }
1449
1450 for(size_t i = 1; i < allocations.size(); ++i)
1451 {
1452 DestroyAllocation(allocations[i]);
1453 allocations.erase(allocations.begin() + i);
1454 }
1455
1456 VmaDefragmentationInfo defragInfo = {};
1457 defragInfo.maxAllocationsToMove = 1;
1458 defragInfo.maxBytesToMove = BUF_SIZE;
1459
1460 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1461 {
1462 VmaDefragmentationStats defragStats;
1463 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001464 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001465 }
1466
1467 ValidateAllocationsData(allocations.data(), allocations.size());
1468
1469 DestroyAllAllocations(allocations);
1470 }
1471
1472 // # Test 3
1473 // Buffers of variable size.
1474 // Create a number of buffers. Remove some percent of them.
1475 // Defragment while having some percent of them unmovable.
1476 // Expected result: Just simple validation.
1477 {
1478 for(size_t i = 0; i < 100; ++i)
1479 {
1480 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1481 localBufCreateInfo.size = RandomBufSize();
1482
1483 AllocInfo allocInfo;
1484 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1485 allocations.push_back(allocInfo);
1486 }
1487
1488 const uint32_t percentToDelete = 60;
1489 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1490 for(size_t i = 0; i < numberToDelete; ++i)
1491 {
1492 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1493 DestroyAllocation(allocations[indexToDelete]);
1494 allocations.erase(allocations.begin() + indexToDelete);
1495 }
1496
1497 // Non-movable allocations will be at the beginning of allocations array.
1498 const uint32_t percentNonMovable = 20;
1499 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1500 for(size_t i = 0; i < numberNonMovable; ++i)
1501 {
1502 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1503 if(indexNonMovable != i)
1504 std::swap(allocations[i], allocations[indexNonMovable]);
1505 }
1506
1507 VmaDefragmentationStats defragStats;
1508 Defragment(
1509 allocations.data() + numberNonMovable,
1510 allocations.size() - numberNonMovable,
1511 nullptr, &defragStats);
1512
1513 ValidateAllocationsData(allocations.data(), allocations.size());
1514
1515 DestroyAllAllocations(allocations);
1516 }
1517 }
1518
Adam Sawicki647cf242018-11-23 17:58:00 +01001519 /*
1520 Allocation that must be move to an overlapping place using memmove().
1521 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1522 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001523 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001524 {
1525 AllocInfo allocInfo[2];
1526
1527 bufCreateInfo.size = BUF_SIZE;
1528 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1529 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1530 bufCreateInfo.size = biggerBufSize;
1531 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1532
1533 DestroyAllocation(allocInfo[0]);
1534
1535 VmaDefragmentationStats defragStats;
1536 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1537 // If this fails, it means we couldn't do memmove with overlapping regions.
1538 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1539
1540 ValidateAllocationsData(&allocInfo[1], 1);
1541 DestroyAllocation(allocInfo[1]);
1542 }
1543
Adam Sawickib8333fb2018-03-13 16:15:53 +01001544 vmaDestroyPool(g_hAllocator, pool);
1545}
1546
// Runs the same defragmentation scenario twice - once addressing the whole
// pool (pPools), once passing the explicit allocation list (pAllocations) -
// and checks that both approaches produce identical statistics.
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;   // 8 buffers per block.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    // defragStats[0]: whole-pool case, defragStats[1]: explicit-allocations case.
    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        // A fresh pool per case so both start from an identical fragmentation state
        // (same RNG-independent creation sequence).
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        // Erasing while advancing the index removes every second allocation,
        // leaving holes for the defragmentation to close.
        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        // No limits on moved bytes/allocations.
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            // Case 0: defragment everything in the pool.
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            // Case 1: pass every allocation explicitly - must be equivalent.
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    // Both addressing modes must have done exactly the same work.
    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}
1633
// Stress test of the legacy vmaDefragment() API: creates many random buffers,
// frees most of them, defragments with no limits, recreates the resources of
// moved allocations, and validates their contents.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    // Sanity check before defragmentation.
    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // Optionally exclude a percentage of allocations from defragmentation.
        // NOTE(review): currently 0. With a non-zero percent the indices of
        // vmaAllocations/allocationsChanged would no longer line up with
        // `allocations`, so RecreateAllocationResource(allocations[i]) below
        // would need an index mapping - confirm before enabling.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits: move as many allocations/bytes as the algorithm wants.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Moved allocations need their Vulkan resource recreated and rebound.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            // Data must survive the move.
            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1717
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001718static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001719{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001720 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001721 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001722
1723 std::vector<AllocInfo> allocations;
1724
1725 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001726 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1727 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001728 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001729 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1730 const size_t percentToLeave = 30;
1731 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001732 RandomNumberGenerator rand = { 234522 };
1733
1734 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001735
1736 VmaAllocationCreateInfo allocCreateInfo = {};
1737 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001738 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001739
1740 // Create all intended buffers.
1741 for(size_t i = 0; i < bufCount; ++i)
1742 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001743 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1744
1745 if(rand.Generate() % 100 < percentNonMovable)
1746 {
1747 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1748 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1749 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1750 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1751 }
1752 else
1753 {
1754 // Different usage just to see different color in output from VmaDumpVis.
1755 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1756 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1757 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1758 // And in JSON dump.
1759 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1760 }
1761
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001762 AllocInfo alloc;
1763 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1764 alloc.m_StartValue = rand.Generate();
1765 allocations.push_back(alloc);
1766 }
1767
1768 // Destroy some percentage of them.
1769 {
1770 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1771 for(size_t i = 0; i < buffersToDestroy; ++i)
1772 {
1773 const size_t index = rand.Generate() % allocations.size();
1774 allocations[index].Destroy();
1775 allocations.erase(allocations.begin() + index);
1776 }
1777 }
1778
1779 // Fill them with meaningful data.
1780 UploadGpuData(allocations.data(), allocations.size());
1781
Adam Sawickic6ede152018-11-16 17:04:14 +01001782 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001783 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001784 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001785
1786 // Defragment using GPU only.
1787 {
1788 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001789
Adam Sawickic6ede152018-11-16 17:04:14 +01001790 std::vector<VmaAllocation> allocationPtrs;
1791 std::vector<VkBool32> allocationChanged;
1792 std::vector<size_t> allocationOriginalIndex;
1793
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001794 for(size_t i = 0; i < allocCount; ++i)
1795 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001796 VmaAllocationInfo allocInfo = {};
1797 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1798 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1799 {
1800 allocationPtrs.push_back(allocations[i].m_Allocation);
1801 allocationChanged.push_back(VK_FALSE);
1802 allocationOriginalIndex.push_back(i);
1803 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001804 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001805
1806 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001807
1808 BeginSingleTimeCommands();
1809
1810 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001811 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001812 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001813 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001814 defragInfo.pAllocationsChanged = allocationChanged.data();
1815 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001816 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1817 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1818
1819 VmaDefragmentationStats stats = {};
1820 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1821 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1822 TEST(res >= VK_SUCCESS);
1823
1824 EndSingleTimeCommands();
1825
1826 vmaDefragmentationEnd(g_hAllocator, ctx);
1827
Adam Sawickic6ede152018-11-16 17:04:14 +01001828 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001829 {
1830 if(allocationChanged[i])
1831 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001832 const size_t origAllocIndex = allocationOriginalIndex[i];
1833 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001834 }
1835 }
1836
Adam Sawicki4d844e22019-01-24 16:21:05 +01001837 // If corruption detection is enabled, GPU defragmentation may not work on
1838 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001839 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001840 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1841 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001842 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001843 }
1844
1845 ValidateGpuData(allocations.data(), allocations.size());
1846
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001847 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001848 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001849
1850 // Destroy all remaining buffers.
1851 for(size_t i = allocations.size(); i--; )
1852 {
1853 allocations[i].Destroy();
1854 }
Adam Sawicki05704002018-11-08 16:07:29 +01001855
1856 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001857}
1858
// Records, into g_hTemporaryCommandBuffer, the GPU work for one incremental
// defragmentation pass: for every move reported in stepInfo, creates a new
// VkImage/VkBuffer bound at the destination memory/offset, emits the required
// pipeline barriers and layout transitions, and copies the old resource's
// contents into the new one. New handles are stashed in AllocInfo::m_NewImage /
// m_NewBuffer (retrieved via the allocation's pUserData); the caller is
// responsible for destroying the old handles after the pass completes.
// Assumes images are color images (VK_IMAGE_ASPECT_COLOR_BIT) with arrayLayers == 1.
static void ProcessDefragmentationPassInfo(VmaDefragmentationPassInfo &stepInfo)
{
    // Barriers issued before the copies (old -> TRANSFER_SRC, new -> TRANSFER_DST)
    // and after them (new image back to the old image's original layout).
    std::vector<VkImageMemoryBarrier> beginImageBarriers;
    std::vector<VkImageMemoryBarrier> finalizeImageBarriers;

    VkPipelineStageFlags beginSrcStageMask = 0;
    VkPipelineStageFlags beginDstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;

    VkPipelineStageFlags finalizeSrcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
    VkPipelineStageFlags finalizeDstStageMask = 0;

    // Set when at least one buffer is moved; buffers use global memory barriers
    // instead of per-resource image barriers.
    bool wantsMemoryBarrier = false;

    VkMemoryBarrier beginMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
    VkMemoryBarrier finalizeMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };

    // Pass 1: create the destination resources and gather all barriers.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        // pUserData was set by the test to point back at the owning AllocInfo.
        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            VkImage newImage;

            const VkResult result = vkCreateImage(g_hDevice, &allocInfo->m_ImageInfo, g_Allocs, &newImage);
            TEST(result >= VK_SUCCESS);

            // Bind the new image at the destination reported by the defragmentation pass.
            vkBindImageMemory(g_hDevice, newImage, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewImage = newImage;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // We need one pipeline barrier and two image layout transitions here
            // First we'll have to turn our newly created image into VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
            // And the second one is turning the old image into VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL

            VkImageSubresourceRange subresourceRange = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, VK_REMAINING_MIP_LEVELS,
                0, VK_REMAINING_ARRAY_LAYERS
            };

            // Barrier 1: fresh image, UNDEFINED -> TRANSFER_DST (contents discardable).
            VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
            barrier.srcAccessMask = 0;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.image = newImage;
            barrier.subresourceRange = subresourceRange;

            beginImageBarriers.push_back(barrier);

            // Second barrier to convert the existing image. This one actually needs a real barrier
            barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = allocInfo->m_ImageLayout;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
            barrier.image = allocInfo->m_Image;

            beginImageBarriers.push_back(barrier);

            // And lastly we need a barrier that turns our new image into the layout of the old one
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.newLayout = allocInfo->m_ImageLayout;
            barrier.image = newImage;

            finalizeImageBarriers.push_back(barrier);
        }
        else if(allocInfo->m_Buffer)
        {
            VkBuffer newBuffer;

            const VkResult result = vkCreateBuffer(g_hDevice, &allocInfo->m_BufferInfo, g_Allocs, &newBuffer);
            TEST(result >= VK_SUCCESS);

            // Bind the new buffer at the destination reported by the defragmentation pass.
            vkBindBufferMemory(g_hDevice, newBuffer, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewBuffer = newBuffer;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // Buffers don't need layout transitions; a single global memory
            // barrier before/after the copies covers all of them at once.
            beginMemoryBarrier.srcAccessMask |= VK_ACCESS_MEMORY_WRITE_BIT;
            beginMemoryBarrier.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

            finalizeMemoryBarrier.srcAccessMask |= VK_ACCESS_TRANSFER_WRITE_BIT;
            finalizeMemoryBarrier.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT;

            wantsMemoryBarrier = true;
        }
    }

    // Emit the pre-copy barrier batch (if there is anything to synchronize).
    if(!beginImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, beginSrcStageMask, beginDstStageMask, 0,
            memoryBarrierCount, &beginMemoryBarrier,
            0, nullptr,
            (uint32_t)beginImageBarriers.size(), beginImageBarriers.data());
    }

    // Pass 2: record the actual copies from old to new resources.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++ i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            std::vector<VkImageCopy> imageCopies;

            // Copy all mips of the source image into the target image
            VkOffset3D offset = { 0, 0, 0 };
            VkExtent3D extent = allocInfo->m_ImageInfo.extent;

            VkImageSubresourceLayers subresourceLayers = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                0, 1
            };

            for(uint32_t mip = 0; mip < allocInfo->m_ImageInfo.mipLevels; ++ mip)
            {
                subresourceLayers.mipLevel = mip;

                VkImageCopy imageCopy{
                    subresourceLayers,
                    offset,
                    subresourceLayers,
                    offset,
                    extent
                };

                imageCopies.push_back(imageCopy);

                // Each successive mip level halves in each dimension, clamped to 1.
                extent.width = std::max(uint32_t(1), extent.width >> 1);
                extent.height = std::max(uint32_t(1), extent.height >> 1);
                extent.depth = std::max(uint32_t(1), extent.depth >> 1);
            }

            vkCmdCopyImage(
                g_hTemporaryCommandBuffer,
                allocInfo->m_Image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                allocInfo->m_NewImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                (uint32_t)imageCopies.size(), imageCopies.data());
        }
        else if(allocInfo->m_Buffer)
        {
            // Whole-buffer copy: srcOffset = dstOffset = 0, full size.
            VkBufferCopy region = {
                0,
                0,
                allocInfo->m_BufferInfo.size };

            vkCmdCopyBuffer(g_hTemporaryCommandBuffer,
                allocInfo->m_Buffer, allocInfo->m_NewBuffer,
                1, &region);
        }
    }

    // Emit the post-copy barrier batch, restoring image layouts and making
    // transfer writes visible to subsequent reads.
    if(!finalizeImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, finalizeSrcStageMask, finalizeDstStageMask, 0,
            memoryBarrierCount, &finalizeMemoryBarrier,
            0, nullptr,
            (uint32_t)finalizeImageBarriers.size(), finalizeImageBarriers.data());
    }
}
2039
2040
2041static void TestDefragmentationIncrementalBasic()
2042{
2043 wprintf(L"Test defragmentation incremental basic\n");
2044 g_MemoryAliasingWarningEnabled = false;
2045
2046 std::vector<AllocInfo> allocations;
2047
2048 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2049 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2050 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2051 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2052 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic467e282019-12-23 16:38:31 +01002053 const size_t imageCount = totalSize / ((size_t)imageSizes[0] * imageSizes[0] * 4) / 2;
Adam Sawickia52012d2019-12-23 15:28:51 +01002054 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2055 const size_t percentToLeave = 30;
2056 RandomNumberGenerator rand = { 234522 };
2057
2058 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2059 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2060 imageInfo.extent.depth = 1;
2061 imageInfo.mipLevels = 1;
2062 imageInfo.arrayLayers = 1;
2063 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2064 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2065 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2066 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2067 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2068
2069 VmaAllocationCreateInfo allocCreateInfo = {};
2070 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2071 allocCreateInfo.flags = 0;
2072
2073 // Create all intended images.
2074 for(size_t i = 0; i < imageCount; ++i)
2075 {
2076 const uint32_t size = imageSizes[rand.Generate() % 3];
2077
2078 imageInfo.extent.width = size;
2079 imageInfo.extent.height = size;
2080
2081 AllocInfo alloc;
2082 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2083 alloc.m_StartValue = 0;
2084
2085 allocations.push_back(alloc);
2086 }
2087
2088 // And all buffers
2089 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2090
2091 for(size_t i = 0; i < bufCount; ++i)
2092 {
2093 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2094 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2095
2096 AllocInfo alloc;
2097 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2098 alloc.m_StartValue = 0;
2099
2100 allocations.push_back(alloc);
2101 }
2102
2103 // Destroy some percentage of them.
2104 {
2105 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2106 for(size_t i = 0; i < allocationsToDestroy; ++i)
2107 {
2108 const size_t index = rand.Generate() % allocations.size();
2109 allocations[index].Destroy();
2110 allocations.erase(allocations.begin() + index);
2111 }
2112 }
2113
2114 {
2115 // Set our user data pointers. A real application should probably be more clever here
2116 const size_t allocationCount = allocations.size();
2117 for(size_t i = 0; i < allocationCount; ++i)
2118 {
2119 AllocInfo &alloc = allocations[i];
2120 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2121 }
2122 }
2123
2124 // Fill them with meaningful data.
2125 UploadGpuData(allocations.data(), allocations.size());
2126
2127 wchar_t fileName[MAX_PATH];
2128 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_A_before.json");
2129 SaveAllocatorStatsToFile(fileName);
2130
2131 // Defragment using GPU only.
2132 {
2133 const size_t allocCount = allocations.size();
2134
2135 std::vector<VmaAllocation> allocationPtrs;
2136
2137 for(size_t i = 0; i < allocCount; ++i)
2138 {
Adam Sawickia52012d2019-12-23 15:28:51 +01002139 allocationPtrs.push_back(allocations[i].m_Allocation);
2140 }
2141
2142 const size_t movableAllocCount = allocationPtrs.size();
2143
2144 VmaDefragmentationInfo2 defragInfo = {};
2145 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2146 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2147 defragInfo.pAllocations = allocationPtrs.data();
2148 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2149 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2150
2151 VmaDefragmentationStats stats = {};
2152 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2153 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2154 TEST(res >= VK_SUCCESS);
2155
2156 res = VK_NOT_READY;
2157
Adam Sawickic467e282019-12-23 16:38:31 +01002158 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002159 moveInfo.resize(movableAllocCount);
2160
2161 while(res == VK_NOT_READY)
2162 {
Adam Sawickic467e282019-12-23 16:38:31 +01002163 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002164 stepInfo.pMoves = moveInfo.data();
2165 stepInfo.moveCount = (uint32_t)moveInfo.size();
2166
Adam Sawickic467e282019-12-23 16:38:31 +01002167 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002168 TEST(res >= VK_SUCCESS);
2169
2170 BeginSingleTimeCommands();
Adam Sawickic467e282019-12-23 16:38:31 +01002171 std::vector<void*> newHandles;
Adam Sawickia52012d2019-12-23 15:28:51 +01002172 ProcessDefragmentationStepInfo(stepInfo);
2173 EndSingleTimeCommands();
2174
Adam Sawickic467e282019-12-23 16:38:31 +01002175 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2176
2177 // Destroy old buffers/images and replace them with new handles.
2178 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2179 {
2180 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2181 VmaAllocationInfo vmaAllocInfo;
2182 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2183 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2184 if(allocInfo->m_Buffer)
2185 {
2186 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2187 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2188 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2189 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2190 }
2191 else if(allocInfo->m_Image)
2192 {
2193 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2194 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2195 allocInfo->m_Image = allocInfo->m_NewImage;
2196 allocInfo->m_NewImage = VK_NULL_HANDLE;
2197 }
2198 else
2199 assert(0);
2200 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002201 }
2202
2203 TEST(res >= VK_SUCCESS);
2204 vmaDefragmentationEnd(g_hAllocator, ctx);
2205
2206 // If corruption detection is enabled, GPU defragmentation may not work on
2207 // memory types that have this detection active, e.g. on Intel.
2208#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2209 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2210 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2211#endif
2212 }
2213
2214 //ValidateGpuData(allocations.data(), allocations.size());
2215
2216 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_B_after.json");
2217 SaveAllocatorStatsToFile(fileName);
2218
Adam Sawickic467e282019-12-23 16:38:31 +01002219 // Destroy all remaining buffers and images.
Adam Sawickia52012d2019-12-23 15:28:51 +01002220 for(size_t i = allocations.size(); i--; )
2221 {
2222 allocations[i].Destroy();
2223 }
2224
2225 g_MemoryAliasingWarningEnabled = true;
2226}
2227
2228void TestDefragmentationIncrementalComplex()
2229{
2230 wprintf(L"Test defragmentation incremental complex\n");
2231 g_MemoryAliasingWarningEnabled = false;
2232
2233 std::vector<AllocInfo> allocations;
2234
2235 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2236 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2237 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2238 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2239 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
2240 const size_t imageCount = (size_t)(totalSize / (imageSizes[0] * imageSizes[0] * 4)) / 2;
2241 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2242 const size_t percentToLeave = 30;
2243 RandomNumberGenerator rand = { 234522 };
2244
2245 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2246 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2247 imageInfo.extent.depth = 1;
2248 imageInfo.mipLevels = 1;
2249 imageInfo.arrayLayers = 1;
2250 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2251 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2252 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2253 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2254 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2255
2256 VmaAllocationCreateInfo allocCreateInfo = {};
2257 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2258 allocCreateInfo.flags = 0;
2259
2260 // Create all intended images.
2261 for(size_t i = 0; i < imageCount; ++i)
2262 {
2263 const uint32_t size = imageSizes[rand.Generate() % 3];
2264
2265 imageInfo.extent.width = size;
2266 imageInfo.extent.height = size;
2267
2268 AllocInfo alloc;
2269 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2270 alloc.m_StartValue = 0;
2271
2272 allocations.push_back(alloc);
2273 }
2274
2275 // And all buffers
2276 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2277
2278 for(size_t i = 0; i < bufCount; ++i)
2279 {
2280 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2281 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2282
2283 AllocInfo alloc;
2284 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2285 alloc.m_StartValue = 0;
2286
2287 allocations.push_back(alloc);
2288 }
2289
2290 // Destroy some percentage of them.
2291 {
2292 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2293 for(size_t i = 0; i < allocationsToDestroy; ++i)
2294 {
2295 const size_t index = rand.Generate() % allocations.size();
2296 allocations[index].Destroy();
2297 allocations.erase(allocations.begin() + index);
2298 }
2299 }
2300
2301 {
2302 // Set our user data pointers. A real application should probably be more clever here
2303 const size_t allocationCount = allocations.size();
2304 for(size_t i = 0; i < allocationCount; ++i)
2305 {
2306 AllocInfo &alloc = allocations[i];
2307 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2308 }
2309 }
2310
2311 // Fill them with meaningful data.
2312 UploadGpuData(allocations.data(), allocations.size());
2313
2314 wchar_t fileName[MAX_PATH];
2315 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_A_before.json");
2316 SaveAllocatorStatsToFile(fileName);
2317
2318 std::vector<AllocInfo> additionalAllocations;
2319
2320#define MakeAdditionalAllocation() \
2321 do { \
2322 { \
2323 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16); \
2324 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; \
2325 \
2326 AllocInfo alloc; \
2327 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo); \
2328 \
2329 additionalAllocations.push_back(alloc); \
2330 } \
2331 } while(0)
2332
2333 // Defragment using GPU only.
2334 {
2335 const size_t allocCount = allocations.size();
2336
2337 std::vector<VmaAllocation> allocationPtrs;
2338
2339 for(size_t i = 0; i < allocCount; ++i)
2340 {
2341 VmaAllocationInfo allocInfo = {};
2342 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
2343
2344 allocationPtrs.push_back(allocations[i].m_Allocation);
2345 }
2346
2347 const size_t movableAllocCount = allocationPtrs.size();
2348
2349 VmaDefragmentationInfo2 defragInfo = {};
2350 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2351 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2352 defragInfo.pAllocations = allocationPtrs.data();
2353 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2354 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2355
2356 VmaDefragmentationStats stats = {};
2357 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2358 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2359 TEST(res >= VK_SUCCESS);
2360
2361 res = VK_NOT_READY;
2362
Adam Sawickic467e282019-12-23 16:38:31 +01002363 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002364 moveInfo.resize(movableAllocCount);
2365
2366 MakeAdditionalAllocation();
2367
2368 while(res == VK_NOT_READY)
2369 {
Adam Sawickic467e282019-12-23 16:38:31 +01002370 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002371 stepInfo.pMoves = moveInfo.data();
2372 stepInfo.moveCount = (uint32_t)moveInfo.size();
2373
Adam Sawickic467e282019-12-23 16:38:31 +01002374 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002375 TEST(res >= VK_SUCCESS);
2376
2377 MakeAdditionalAllocation();
2378
2379 BeginSingleTimeCommands();
2380 ProcessDefragmentationStepInfo(stepInfo);
2381 EndSingleTimeCommands();
2382
Adam Sawickic467e282019-12-23 16:38:31 +01002383 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2384
2385 // Destroy old buffers/images and replace them with new handles.
2386 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2387 {
2388 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2389 VmaAllocationInfo vmaAllocInfo;
2390 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2391 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2392 if(allocInfo->m_Buffer)
2393 {
2394 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2395 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2396 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2397 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2398 }
2399 else if(allocInfo->m_Image)
2400 {
2401 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2402 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2403 allocInfo->m_Image = allocInfo->m_NewImage;
2404 allocInfo->m_NewImage = VK_NULL_HANDLE;
2405 }
2406 else
2407 assert(0);
2408 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002409
2410 MakeAdditionalAllocation();
2411 }
2412
2413 TEST(res >= VK_SUCCESS);
2414 vmaDefragmentationEnd(g_hAllocator, ctx);
2415
2416 // If corruption detection is enabled, GPU defragmentation may not work on
2417 // memory types that have this detection active, e.g. on Intel.
2418#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2419 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2420 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2421#endif
2422 }
2423
2424 //ValidateGpuData(allocations.data(), allocations.size());
2425
2426 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_B_after.json");
2427 SaveAllocatorStatsToFile(fileName);
2428
2429 // Destroy all remaining buffers.
2430 for(size_t i = allocations.size(); i--; )
2431 {
2432 allocations[i].Destroy();
2433 }
2434
2435 for(size_t i = additionalAllocations.size(); i--; )
2436 {
2437 additionalAllocations[i].Destroy();
2438 }
2439
2440 g_MemoryAliasingWarningEnabled = true;
2441}
2442
2443
Adam Sawickib8333fb2018-03-13 16:15:53 +01002444static void TestUserData()
2445{
2446 VkResult res;
2447
2448 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2449 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2450 bufCreateInfo.size = 0x10000;
2451
2452 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
2453 {
2454 // Opaque pointer
2455 {
2456
2457 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
2458 void* pointerToSomething = &res;
2459
2460 VmaAllocationCreateInfo allocCreateInfo = {};
2461 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2462 allocCreateInfo.pUserData = numberAsPointer;
2463 if(testIndex == 1)
2464 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2465
2466 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2467 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002468 TEST(res == VK_SUCCESS);
2469 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002470
2471 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002472 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002473
2474 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
2475 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002476 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002477
2478 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2479 }
2480
2481 // String
2482 {
2483 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
2484 const char* name2 = "2";
2485 const size_t name1Len = strlen(name1);
2486
2487 char* name1Buf = new char[name1Len + 1];
2488 strcpy_s(name1Buf, name1Len + 1, name1);
2489
2490 VmaAllocationCreateInfo allocCreateInfo = {};
2491 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2492 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
2493 allocCreateInfo.pUserData = name1Buf;
2494 if(testIndex == 1)
2495 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2496
2497 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2498 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002499 TEST(res == VK_SUCCESS);
2500 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
2501 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002502
2503 delete[] name1Buf;
2504
2505 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002506 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002507
2508 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
2509 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002510 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002511
2512 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
2513 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002515
2516 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2517 }
2518 }
2519}
2520
Adam Sawicki370ab182018-11-08 16:31:00 +01002521static void TestInvalidAllocations()
2522{
2523 VkResult res;
2524
2525 VmaAllocationCreateInfo allocCreateInfo = {};
2526 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2527
2528 // Try to allocate 0 bytes.
2529 {
2530 VkMemoryRequirements memReq = {};
2531 memReq.size = 0; // !!!
2532 memReq.alignment = 4;
2533 memReq.memoryTypeBits = UINT32_MAX;
2534 VmaAllocation alloc = VK_NULL_HANDLE;
2535 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
2536 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
2537 }
2538
2539 // Try to create buffer with size = 0.
2540 {
2541 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2542 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2543 bufCreateInfo.size = 0; // !!!
2544 VkBuffer buf = VK_NULL_HANDLE;
2545 VmaAllocation alloc = VK_NULL_HANDLE;
2546 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
2547 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2548 }
2549
2550 // Try to create image with one dimension = 0.
2551 {
2552 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2553 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
2554 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
2555 imageCreateInfo.extent.width = 128;
2556 imageCreateInfo.extent.height = 0; // !!!
2557 imageCreateInfo.extent.depth = 1;
2558 imageCreateInfo.mipLevels = 1;
2559 imageCreateInfo.arrayLayers = 1;
2560 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2561 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
2562 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2563 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2564 VkImage image = VK_NULL_HANDLE;
2565 VmaAllocation alloc = VK_NULL_HANDLE;
2566 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
2567 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2568 }
2569}
2570
Adam Sawickib8333fb2018-03-13 16:15:53 +01002571static void TestMemoryRequirements()
2572{
2573 VkResult res;
2574 VkBuffer buf;
2575 VmaAllocation alloc;
2576 VmaAllocationInfo allocInfo;
2577
2578 const VkPhysicalDeviceMemoryProperties* memProps;
2579 vmaGetMemoryProperties(g_hAllocator, &memProps);
2580
2581 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2582 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2583 bufInfo.size = 128;
2584
2585 VmaAllocationCreateInfo allocCreateInfo = {};
2586
2587 // No requirements.
2588 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002589 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002590 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2591
2592 // Usage.
2593 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2594 allocCreateInfo.requiredFlags = 0;
2595 allocCreateInfo.preferredFlags = 0;
2596 allocCreateInfo.memoryTypeBits = UINT32_MAX;
2597
2598 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(res == VK_SUCCESS);
2600 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002601 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2602
2603 // Required flags, preferred flags.
2604 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
2605 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2606 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2607 allocCreateInfo.memoryTypeBits = 0;
2608
2609 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002610 TEST(res == VK_SUCCESS);
2611 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2612 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002613 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2614
2615 // memoryTypeBits.
2616 const uint32_t memType = allocInfo.memoryType;
2617 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2618 allocCreateInfo.requiredFlags = 0;
2619 allocCreateInfo.preferredFlags = 0;
2620 allocCreateInfo.memoryTypeBits = 1u << memType;
2621
2622 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002623 TEST(res == VK_SUCCESS);
2624 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002625 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2626
2627}
2628
Adam Sawickia1d992f2020-03-02 15:32:10 +01002629static void TestGetAllocatorInfo()
2630{
2631 wprintf(L"Test vnaGetAllocatorInfo\n");
2632
2633 VmaAllocatorInfo allocInfo = {};
2634 vmaGetAllocatorInfo(g_hAllocator, &allocInfo);
2635 TEST(allocInfo.instance == g_hVulkanInstance);
2636 TEST(allocInfo.physicalDevice == g_hPhysicalDevice);
2637 TEST(allocInfo.device == g_hDevice);
2638}
2639
Adam Sawickib8333fb2018-03-13 16:15:53 +01002640static void TestBasics()
2641{
2642 VkResult res;
2643
Adam Sawickia1d992f2020-03-02 15:32:10 +01002644 TestGetAllocatorInfo();
2645
Adam Sawickib8333fb2018-03-13 16:15:53 +01002646 TestMemoryRequirements();
2647
2648 // Lost allocation
2649 {
2650 VmaAllocation alloc = VK_NULL_HANDLE;
2651 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002652 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002653
2654 VmaAllocationInfo allocInfo;
2655 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002656 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
2657 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002658
2659 vmaFreeMemory(g_hAllocator, alloc);
2660 }
2661
2662 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
2663 {
2664 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2665 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2666 bufCreateInfo.size = 128;
2667
2668 VmaAllocationCreateInfo allocCreateInfo = {};
2669 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2670 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
2671
2672 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2673 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002674 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002675
2676 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2677
2678 // Same with OWN_MEMORY.
2679 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2680
2681 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002682 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002683
2684 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2685 }
2686
2687 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01002688
2689 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01002690}
2691
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01002692static void TestPool_MinBlockCount()
2693{
2694#if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
2695 return;
2696#endif
2697
2698 wprintf(L"Test Pool MinBlockCount\n");
2699 VkResult res;
2700
2701 static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
2702 static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
2703
2704 VmaAllocationCreateInfo allocCreateInfo = {};
2705 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
2706
2707 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2708 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2709 bufCreateInfo.size = ALLOC_SIZE;
2710
2711 VmaPoolCreateInfo poolCreateInfo = {};
2712 poolCreateInfo.blockSize = BLOCK_SIZE;
2713 poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
2714 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
2715 TEST(res == VK_SUCCESS);
2716
2717 VmaPool pool = VK_NULL_HANDLE;
2718 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
2719 TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
2720
2721 // Check that there are 2 blocks preallocated as requested.
2722 VmaPoolStats begPoolStats = {};
2723 vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
2724 TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
2725
2726 // Allocate 5 buffers to create 3 blocks.
2727 static const uint32_t BUF_COUNT = 5;
2728 allocCreateInfo.pool = pool;
2729 std::vector<AllocInfo> allocs(BUF_COUNT);
2730 for(uint32_t i = 0; i < BUF_COUNT; ++i)
2731 {
2732 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
2733 TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
2734 }
2735
2736 // Check that there are really 3 blocks.
2737 VmaPoolStats poolStats2 = {};
2738 vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
2739 TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
2740
2741 // Free two first allocations to make one block empty.
2742 allocs[0].Destroy();
2743 allocs[1].Destroy();
2744
2745 // Check that there are still 3 blocks due to hysteresis.
2746 VmaPoolStats poolStats3 = {};
2747 vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
2748 TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats2.size == BLOCK_SIZE * 3);
2749
2750 // Free the last allocation to make second block empty.
2751 allocs[BUF_COUNT - 1].Destroy();
2752
2753 // Check that there are now 2 blocks only.
2754 VmaPoolStats poolStats4 = {};
2755 vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
2756 TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
2757
2758 // Cleanup.
2759 for(size_t i = allocs.size(); i--; )
2760 {
2761 allocs[i].Destroy();
2762 }
2763 vmaDestroyPool(g_hAllocator, pool);
2764}
2765
Adam Sawickib8333fb2018-03-13 16:15:53 +01002766void TestHeapSizeLimit()
2767{
Adam Sawickib3f51102019-11-18 13:05:56 +01002768 const VkDeviceSize HEAP_SIZE_LIMIT = 200ull * 1024 * 1024; // 200 MB
2769 const VkDeviceSize BLOCK_SIZE = 20ull * 1024 * 1024; // 20 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01002770
2771 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
2772 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
2773 {
2774 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
2775 }
2776
2777 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2778 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2779 allocatorCreateInfo.device = g_hDevice;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002780 allocatorCreateInfo.instance = g_hVulkanInstance;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002781 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2782
2783 VmaAllocator hAllocator;
2784 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002785 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002786
2787 struct Item
2788 {
2789 VkBuffer hBuf;
2790 VmaAllocation hAlloc;
2791 };
2792 std::vector<Item> items;
2793
2794 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2795 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2796
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002797 // 1. Allocate two blocks of dedicated memory, half the size of BLOCK_SIZE.
2798 VmaAllocationInfo dedicatedAllocInfo;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002799 {
2800 VmaAllocationCreateInfo allocCreateInfo = {};
2801 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2802 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2803
2804 bufCreateInfo.size = BLOCK_SIZE / 2;
2805
2806 for(size_t i = 0; i < 2; ++i)
2807 {
2808 Item item;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002809 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002810 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002811 items.push_back(item);
2812 }
2813 }
2814
2815 // Create pool to make sure allocations must be out of this memory type.
2816 VmaPoolCreateInfo poolCreateInfo = {};
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002817 poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002818 poolCreateInfo.blockSize = BLOCK_SIZE;
2819
2820 VmaPool hPool;
2821 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002822 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002823
2824 // 2. Allocate normal buffers from all the remaining memory.
2825 {
2826 VmaAllocationCreateInfo allocCreateInfo = {};
2827 allocCreateInfo.pool = hPool;
2828
2829 bufCreateInfo.size = BLOCK_SIZE / 2;
2830
2831 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2832 for(size_t i = 0; i < bufCount; ++i)
2833 {
2834 Item item;
2835 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002836 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002837 items.push_back(item);
2838 }
2839 }
2840
2841 // 3. Allocation of one more (even small) buffer should fail.
2842 {
2843 VmaAllocationCreateInfo allocCreateInfo = {};
2844 allocCreateInfo.pool = hPool;
2845
2846 bufCreateInfo.size = 128;
2847
2848 VkBuffer hBuf;
2849 VmaAllocation hAlloc;
2850 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002851 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002852 }
2853
2854 // Destroy everything.
2855 for(size_t i = items.size(); i--; )
2856 {
2857 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2858 }
2859
2860 vmaDestroyPool(hAllocator, hPool);
2861
2862 vmaDestroyAllocator(hAllocator);
2863}
2864
#if VMA_DEBUG_MARGIN
// Verifies that with VMA_DEBUG_MARGIN enabled, every allocation is separated
// from the block start and from its neighbors by at least VMA_DEBUG_MARGIN
// bytes, and that vmaCheckCorruption reports no errors on untouched margins.
static void TestDebugMargin()
{
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    // Fixed: loop bound was a hard-coded 10 instead of BUF_COUNT - a latent
    // array overrun if the constant is ever changed.
    for(size_t i = 0; i < BUF_COUNT; ++i)
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill with data.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Check if their offsets preserve margin between them.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002930
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002931static void TestLinearAllocator()
2932{
2933 wprintf(L"Test linear allocator\n");
2934
2935 RandomNumberGenerator rand{645332};
2936
2937 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2938 sampleBufCreateInfo.size = 1024; // Whatever.
2939 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2940
2941 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2942 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2943
2944 VmaPoolCreateInfo poolCreateInfo = {};
2945 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002946 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002947
Adam Sawickiee082772018-06-20 17:45:49 +02002948 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002949 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2950 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2951
2952 VmaPool pool = nullptr;
2953 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002954 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002955
2956 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2957
2958 VmaAllocationCreateInfo allocCreateInfo = {};
2959 allocCreateInfo.pool = pool;
2960
2961 constexpr size_t maxBufCount = 100;
2962 std::vector<BufferInfo> bufInfo;
2963
2964 constexpr VkDeviceSize bufSizeMin = 16;
2965 constexpr VkDeviceSize bufSizeMax = 1024;
2966 VmaAllocationInfo allocInfo;
2967 VkDeviceSize prevOffset = 0;
2968
2969 // Test one-time free.
2970 for(size_t i = 0; i < 2; ++i)
2971 {
2972 // Allocate number of buffers of varying size that surely fit into this block.
2973 VkDeviceSize bufSumSize = 0;
2974 for(size_t i = 0; i < maxBufCount; ++i)
2975 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002976 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002977 BufferInfo newBufInfo;
2978 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2979 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002980 TEST(res == VK_SUCCESS);
2981 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002982 bufInfo.push_back(newBufInfo);
2983 prevOffset = allocInfo.offset;
2984 bufSumSize += bufCreateInfo.size;
2985 }
2986
2987 // Validate pool stats.
2988 VmaPoolStats stats;
2989 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002990 TEST(stats.size == poolCreateInfo.blockSize);
2991 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2992 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002993
2994 // Destroy the buffers in random order.
2995 while(!bufInfo.empty())
2996 {
2997 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2998 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2999 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3000 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3001 }
3002 }
3003
3004 // Test stack.
3005 {
3006 // Allocate number of buffers of varying size that surely fit into this block.
3007 for(size_t i = 0; i < maxBufCount; ++i)
3008 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003009 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003010 BufferInfo newBufInfo;
3011 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3012 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003013 TEST(res == VK_SUCCESS);
3014 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003015 bufInfo.push_back(newBufInfo);
3016 prevOffset = allocInfo.offset;
3017 }
3018
3019 // Destroy few buffers from top of the stack.
3020 for(size_t i = 0; i < maxBufCount / 5; ++i)
3021 {
3022 const BufferInfo& currBufInfo = bufInfo.back();
3023 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3024 bufInfo.pop_back();
3025 }
3026
3027 // Create some more
3028 for(size_t i = 0; i < maxBufCount / 5; ++i)
3029 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003030 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003031 BufferInfo newBufInfo;
3032 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3033 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003034 TEST(res == VK_SUCCESS);
3035 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003036 bufInfo.push_back(newBufInfo);
3037 prevOffset = allocInfo.offset;
3038 }
3039
3040 // Destroy the buffers in reverse order.
3041 while(!bufInfo.empty())
3042 {
3043 const BufferInfo& currBufInfo = bufInfo.back();
3044 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3045 bufInfo.pop_back();
3046 }
3047 }
3048
Adam Sawickiee082772018-06-20 17:45:49 +02003049 // Test ring buffer.
3050 {
3051 // Allocate number of buffers that surely fit into this block.
3052 bufCreateInfo.size = bufSizeMax;
3053 for(size_t i = 0; i < maxBufCount; ++i)
3054 {
3055 BufferInfo newBufInfo;
3056 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3057 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003058 TEST(res == VK_SUCCESS);
3059 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02003060 bufInfo.push_back(newBufInfo);
3061 prevOffset = allocInfo.offset;
3062 }
3063
3064 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
3065 const size_t buffersPerIter = maxBufCount / 10 - 1;
3066 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
3067 for(size_t iter = 0; iter < iterCount; ++iter)
3068 {
3069 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3070 {
3071 const BufferInfo& currBufInfo = bufInfo.front();
3072 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3073 bufInfo.erase(bufInfo.begin());
3074 }
3075 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3076 {
3077 BufferInfo newBufInfo;
3078 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3079 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003080 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02003081 bufInfo.push_back(newBufInfo);
3082 }
3083 }
3084
3085 // Allocate buffers until we reach out-of-memory.
3086 uint32_t debugIndex = 0;
3087 while(res == VK_SUCCESS)
3088 {
3089 BufferInfo newBufInfo;
3090 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3091 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3092 if(res == VK_SUCCESS)
3093 {
3094 bufInfo.push_back(newBufInfo);
3095 }
3096 else
3097 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003098 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02003099 }
3100 ++debugIndex;
3101 }
3102
3103 // Destroy the buffers in random order.
3104 while(!bufInfo.empty())
3105 {
3106 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3107 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3108 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3109 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3110 }
3111 }
3112
Adam Sawicki680b2252018-08-22 14:47:32 +02003113 // Test double stack.
3114 {
3115 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
3116 VkDeviceSize prevOffsetLower = 0;
3117 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
3118 for(size_t i = 0; i < maxBufCount; ++i)
3119 {
3120 const bool upperAddress = (i % 2) != 0;
3121 if(upperAddress)
3122 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3123 else
3124 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003125 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003126 BufferInfo newBufInfo;
3127 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3128 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003129 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003130 if(upperAddress)
3131 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003132 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003133 prevOffsetUpper = allocInfo.offset;
3134 }
3135 else
3136 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003137 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003138 prevOffsetLower = allocInfo.offset;
3139 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003141 bufInfo.push_back(newBufInfo);
3142 }
3143
3144 // Destroy few buffers from top of the stack.
3145 for(size_t i = 0; i < maxBufCount / 5; ++i)
3146 {
3147 const BufferInfo& currBufInfo = bufInfo.back();
3148 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3149 bufInfo.pop_back();
3150 }
3151
3152 // Create some more
3153 for(size_t i = 0; i < maxBufCount / 5; ++i)
3154 {
3155 const bool upperAddress = (i % 2) != 0;
3156 if(upperAddress)
3157 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3158 else
3159 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003160 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003161 BufferInfo newBufInfo;
3162 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3163 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003164 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003165 bufInfo.push_back(newBufInfo);
3166 }
3167
3168 // Destroy the buffers in reverse order.
3169 while(!bufInfo.empty())
3170 {
3171 const BufferInfo& currBufInfo = bufInfo.back();
3172 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3173 bufInfo.pop_back();
3174 }
3175
3176 // Create buffers on both sides until we reach out of memory.
3177 prevOffsetLower = 0;
3178 prevOffsetUpper = poolCreateInfo.blockSize;
3179 res = VK_SUCCESS;
3180 for(size_t i = 0; res == VK_SUCCESS; ++i)
3181 {
3182 const bool upperAddress = (i % 2) != 0;
3183 if(upperAddress)
3184 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3185 else
3186 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003187 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003188 BufferInfo newBufInfo;
3189 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3190 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3191 if(res == VK_SUCCESS)
3192 {
3193 if(upperAddress)
3194 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003195 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003196 prevOffsetUpper = allocInfo.offset;
3197 }
3198 else
3199 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003200 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003201 prevOffsetLower = allocInfo.offset;
3202 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003203 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003204 bufInfo.push_back(newBufInfo);
3205 }
3206 }
3207
3208 // Destroy the buffers in random order.
3209 while(!bufInfo.empty())
3210 {
3211 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3212 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3213 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3214 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3215 }
3216
3217 // Create buffers on upper side only, constant size, until we reach out of memory.
3218 prevOffsetUpper = poolCreateInfo.blockSize;
3219 res = VK_SUCCESS;
3220 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3221 bufCreateInfo.size = bufSizeMax;
3222 for(size_t i = 0; res == VK_SUCCESS; ++i)
3223 {
3224 BufferInfo newBufInfo;
3225 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3226 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3227 if(res == VK_SUCCESS)
3228 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003229 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003230 prevOffsetUpper = allocInfo.offset;
3231 bufInfo.push_back(newBufInfo);
3232 }
3233 }
3234
3235 // Destroy the buffers in reverse order.
3236 while(!bufInfo.empty())
3237 {
3238 const BufferInfo& currBufInfo = bufInfo.back();
3239 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3240 bufInfo.pop_back();
3241 }
3242 }
3243
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003244 // Test ring buffer with lost allocations.
3245 {
3246 // Allocate number of buffers until pool is full.
3247 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
3248 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
3249 res = VK_SUCCESS;
3250 for(size_t i = 0; res == VK_SUCCESS; ++i)
3251 {
3252 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3253
Adam Sawickifd366b62019-01-24 15:26:43 +01003254 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003255
3256 BufferInfo newBufInfo;
3257 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3258 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3259 if(res == VK_SUCCESS)
3260 bufInfo.push_back(newBufInfo);
3261 }
3262
3263 // Free first half of it.
3264 {
3265 const size_t buffersToDelete = bufInfo.size() / 2;
3266 for(size_t i = 0; i < buffersToDelete; ++i)
3267 {
3268 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3269 }
3270 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
3271 }
3272
3273 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003274 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003275 res = VK_SUCCESS;
3276 for(size_t i = 0; res == VK_SUCCESS; ++i)
3277 {
3278 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3279
Adam Sawickifd366b62019-01-24 15:26:43 +01003280 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003281
3282 BufferInfo newBufInfo;
3283 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3284 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3285 if(res == VK_SUCCESS)
3286 bufInfo.push_back(newBufInfo);
3287 }
3288
3289 VkDeviceSize firstNewOffset;
3290 {
3291 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3292
3293 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
3294 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3295 bufCreateInfo.size = bufSizeMax;
3296
3297 BufferInfo newBufInfo;
3298 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3299 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003300 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003301 bufInfo.push_back(newBufInfo);
3302 firstNewOffset = allocInfo.offset;
3303
3304 // Make sure at least one buffer from the beginning became lost.
3305 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003306 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003307 }
3308
Adam Sawickifd366b62019-01-24 15:26:43 +01003309#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003310 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
3311 size_t newCount = 1;
3312 for(;;)
3313 {
3314 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3315
Adam Sawickifd366b62019-01-24 15:26:43 +01003316 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003317
3318 BufferInfo newBufInfo;
3319 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3320 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01003321
Adam Sawickib8d34d52018-10-03 17:41:20 +02003322 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003323 bufInfo.push_back(newBufInfo);
3324 ++newCount;
3325 if(allocInfo.offset < firstNewOffset)
3326 break;
3327 }
Adam Sawickifd366b62019-01-24 15:26:43 +01003328#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003329
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003330 // Delete buffers that are lost.
3331 for(size_t i = bufInfo.size(); i--; )
3332 {
3333 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3334 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3335 {
3336 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3337 bufInfo.erase(bufInfo.begin() + i);
3338 }
3339 }
3340
3341 // Test vmaMakePoolAllocationsLost
3342 {
3343 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3344
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01003345 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003346 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003347 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003348
3349 size_t realLostAllocCount = 0;
3350 for(size_t i = 0; i < bufInfo.size(); ++i)
3351 {
3352 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3353 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3354 ++realLostAllocCount;
3355 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003356 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003357 }
3358
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003359 // Destroy all the buffers in forward order.
3360 for(size_t i = 0; i < bufInfo.size(); ++i)
3361 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3362 bufInfo.clear();
3363 }
3364
Adam Sawicki70a683e2018-08-24 15:36:32 +02003365 vmaDestroyPool(g_hAllocator, pool);
3366}
Adam Sawickif799c4f2018-08-23 10:40:30 +02003367
// Tests a linear-algorithm pool that is allowed to grow to multiple memory blocks:
// verifies that a second block is created on overflow and that empty blocks are
// freed again, both for one-time-free (random destruction order) and stack
// (LIFO destruction order) usage patterns.
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer used both to find the memory type and as the template for
    // every allocation below (1 MB each).
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Note: no explicit blockSize / maxBlockCount, so the pool may allocate
    // additional blocks when the current one is full.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block.
        // Detected by the returned deviceMemory handle changing between allocations.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block.
        // Emptied blocks are expected to have been released.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer.
        // It should fit in the first block again since the second was emptied.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3502
Adam Sawickifd11d752018-08-22 15:02:10 +02003503static void ManuallyTestLinearAllocator()
3504{
3505 VmaStats origStats;
3506 vmaCalculateStats(g_hAllocator, &origStats);
3507
3508 wprintf(L"Manually test linear allocator\n");
3509
3510 RandomNumberGenerator rand{645332};
3511
3512 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3513 sampleBufCreateInfo.size = 1024; // Whatever.
3514 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
3515
3516 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
3517 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3518
3519 VmaPoolCreateInfo poolCreateInfo = {};
3520 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003521 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003522
3523 poolCreateInfo.blockSize = 10 * 1024;
3524 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3525 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
3526
3527 VmaPool pool = nullptr;
3528 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003529 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003530
3531 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
3532
3533 VmaAllocationCreateInfo allocCreateInfo = {};
3534 allocCreateInfo.pool = pool;
3535
3536 std::vector<BufferInfo> bufInfo;
3537 VmaAllocationInfo allocInfo;
3538 BufferInfo newBufInfo;
3539
3540 // Test double stack.
3541 {
3542 /*
3543 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
3544 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
3545
3546 Totally:
3547 1 block allocated
3548 10240 Vulkan bytes
3549 6 new allocations
3550 2256 bytes in allocations
3551 */
3552
3553 bufCreateInfo.size = 32;
3554 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3555 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003556 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003557 bufInfo.push_back(newBufInfo);
3558
3559 bufCreateInfo.size = 1024;
3560 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3561 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003562 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003563 bufInfo.push_back(newBufInfo);
3564
3565 bufCreateInfo.size = 32;
3566 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3567 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003568 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003569 bufInfo.push_back(newBufInfo);
3570
3571 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3572
3573 bufCreateInfo.size = 128;
3574 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3575 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003576 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003577 bufInfo.push_back(newBufInfo);
3578
3579 bufCreateInfo.size = 1024;
3580 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3581 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003582 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003583 bufInfo.push_back(newBufInfo);
3584
3585 bufCreateInfo.size = 16;
3586 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3587 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003588 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003589 bufInfo.push_back(newBufInfo);
3590
3591 VmaStats currStats;
3592 vmaCalculateStats(g_hAllocator, &currStats);
3593 VmaPoolStats poolStats;
3594 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
3595
3596 char* statsStr = nullptr;
3597 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
3598
3599 // PUT BREAKPOINT HERE TO CHECK.
3600 // Inspect: currStats versus origStats, poolStats, statsStr.
3601 int I = 0;
3602
3603 vmaFreeStatsString(g_hAllocator, statsStr);
3604
3605 // Destroy the buffers in reverse order.
3606 while(!bufInfo.empty())
3607 {
3608 const BufferInfo& currBufInfo = bufInfo.back();
3609 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3610 bufInfo.pop_back();
3611 }
3612 }
3613
3614 vmaDestroyPool(g_hAllocator, pool);
3615}
3616
// Benchmarks allocation and deallocation speed of a single configuration:
// one pool algorithm (0 = default, buddy, or linear), optionally pre-fragmented
// ("not empty"), one allocation strategy, and one free order. Results are
// printed to stdout and, if `file` is not null, appended as a CSV row.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block big enough for maxBufCapacity max-size buffers,
    // so the benchmark never triggers block creation/destruction.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    // Optionally pre-fragment the pool so the measured allocations have to
    // search around existing occupied ranges.
    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    // Each iteration allocates allocCount random-size allocations, reorders
    // them per freeOrder, then frees them all; only the vmaAllocateMemory /
    // vmaFreeMemory loops are timed.
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations
        // Reordering happens outside the timed section.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        // CSV row matching the header written by BenchmarkAlgorithms.
        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
3768
Adam Sawicki80927152018-09-07 17:27:23 +02003769static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02003770{
Adam Sawicki80927152018-09-07 17:27:23 +02003771 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02003772
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003773 if(file)
3774 {
3775 fprintf(file,
3776 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003777 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003778 "Allocation time (s),Deallocation time (s)\n");
3779 }
3780
Adam Sawicki0a607132018-08-24 11:18:41 +02003781 uint32_t freeOrderCount = 1;
3782 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3783 freeOrderCount = 3;
3784 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3785 freeOrderCount = 2;
3786
3787 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003788 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003789
3790 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3791 {
3792 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3793 switch(freeOrderIndex)
3794 {
3795 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3796 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3797 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3798 default: assert(0);
3799 }
3800
3801 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3802 {
Adam Sawicki80927152018-09-07 17:27:23 +02003803 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003804 {
Adam Sawicki80927152018-09-07 17:27:23 +02003805 uint32_t algorithm = 0;
3806 switch(algorithmIndex)
3807 {
3808 case 0:
3809 break;
3810 case 1:
3811 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3812 break;
3813 case 2:
3814 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3815 break;
3816 default:
3817 assert(0);
3818 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003819
Adam Sawicki80927152018-09-07 17:27:23 +02003820 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003821 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3822 {
3823 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003824 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003825 {
3826 switch(allocStrategyIndex)
3827 {
3828 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3829 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3830 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3831 default: assert(0);
3832 }
3833 }
3834
Adam Sawicki80927152018-09-07 17:27:23 +02003835 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003836 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003837 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003838 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003839 strategy,
3840 freeOrder); // freeOrder
3841 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003842 }
3843 }
3844 }
3845}
3846
Adam Sawickib8333fb2018-03-13 16:15:53 +01003847static void TestPool_SameSize()
3848{
3849 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3850 const size_t BUF_COUNT = 100;
3851 VkResult res;
3852
3853 RandomNumberGenerator rand{123};
3854
3855 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3856 bufferInfo.size = BUF_SIZE;
3857 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3858
3859 uint32_t memoryTypeBits = UINT32_MAX;
3860 {
3861 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003862 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003863 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003864
3865 VkMemoryRequirements memReq;
3866 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3867 memoryTypeBits = memReq.memoryTypeBits;
3868
Adam Sawicki1f84f622019-07-02 13:40:01 +02003869 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003870 }
3871
3872 VmaAllocationCreateInfo poolAllocInfo = {};
3873 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3874 uint32_t memTypeIndex;
3875 res = vmaFindMemoryTypeIndex(
3876 g_hAllocator,
3877 memoryTypeBits,
3878 &poolAllocInfo,
3879 &memTypeIndex);
3880
3881 VmaPoolCreateInfo poolCreateInfo = {};
3882 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3883 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3884 poolCreateInfo.minBlockCount = 1;
3885 poolCreateInfo.maxBlockCount = 4;
3886 poolCreateInfo.frameInUseCount = 0;
3887
3888 VmaPool pool;
3889 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003890 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003891
Adam Sawickia020fb82019-11-02 14:43:06 +01003892 // Test pool name
3893 {
3894 static const char* const POOL_NAME = "Pool name";
3895 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3896
3897 const char* fetchedPoolName = nullptr;
3898 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3899 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3900
Adam Sawickia020fb82019-11-02 14:43:06 +01003901 vmaSetPoolName(g_hAllocator, pool, nullptr);
3902 }
3903
Adam Sawickib8333fb2018-03-13 16:15:53 +01003904 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3905
3906 VmaAllocationCreateInfo allocInfo = {};
3907 allocInfo.pool = pool;
3908 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3909 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3910
3911 struct BufItem
3912 {
3913 VkBuffer Buf;
3914 VmaAllocation Alloc;
3915 };
3916 std::vector<BufItem> items;
3917
3918 // Fill entire pool.
3919 for(size_t i = 0; i < BUF_COUNT; ++i)
3920 {
3921 BufItem item;
3922 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003923 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003924 items.push_back(item);
3925 }
3926
3927 // Make sure that another allocation would fail.
3928 {
3929 BufItem item;
3930 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003931 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003932 }
3933
3934 // Validate that no buffer is lost. Also check that they are not mapped.
3935 for(size_t i = 0; i < items.size(); ++i)
3936 {
3937 VmaAllocationInfo allocInfo;
3938 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003939 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3940 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003941 }
3942
3943 // Free some percent of random items.
3944 {
3945 const size_t PERCENT_TO_FREE = 10;
3946 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3947 for(size_t i = 0; i < itemsToFree; ++i)
3948 {
3949 size_t index = (size_t)rand.Generate() % items.size();
3950 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3951 items.erase(items.begin() + index);
3952 }
3953 }
3954
3955 // Randomly allocate and free items.
3956 {
3957 const size_t OPERATION_COUNT = BUF_COUNT;
3958 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3959 {
3960 bool allocate = rand.Generate() % 2 != 0;
3961 if(allocate)
3962 {
3963 if(items.size() < BUF_COUNT)
3964 {
3965 BufItem item;
3966 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003967 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003968 items.push_back(item);
3969 }
3970 }
3971 else // Free
3972 {
3973 if(!items.empty())
3974 {
3975 size_t index = (size_t)rand.Generate() % items.size();
3976 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3977 items.erase(items.begin() + index);
3978 }
3979 }
3980 }
3981 }
3982
3983 // Allocate up to maximum.
3984 while(items.size() < BUF_COUNT)
3985 {
3986 BufItem item;
3987 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003988 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003989 items.push_back(item);
3990 }
3991
3992 // Validate that no buffer is lost.
3993 for(size_t i = 0; i < items.size(); ++i)
3994 {
3995 VmaAllocationInfo allocInfo;
3996 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003997 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003998 }
3999
4000 // Next frame.
4001 vmaSetCurrentFrameIndex(g_hAllocator, 2);
4002
4003 // Allocate another BUF_COUNT buffers.
4004 for(size_t i = 0; i < BUF_COUNT; ++i)
4005 {
4006 BufItem item;
4007 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004008 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004009 items.push_back(item);
4010 }
4011
4012 // Make sure the first BUF_COUNT is lost. Delete them.
4013 for(size_t i = 0; i < BUF_COUNT; ++i)
4014 {
4015 VmaAllocationInfo allocInfo;
4016 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004017 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004018 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4019 }
4020 items.erase(items.begin(), items.begin() + BUF_COUNT);
4021
4022 // Validate that no buffer is lost.
4023 for(size_t i = 0; i < items.size(); ++i)
4024 {
4025 VmaAllocationInfo allocInfo;
4026 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004027 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004028 }
4029
4030 // Free one item.
4031 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
4032 items.pop_back();
4033
4034 // Validate statistics.
4035 {
4036 VmaPoolStats poolStats = {};
4037 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004038 TEST(poolStats.allocationCount == items.size());
4039 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
4040 TEST(poolStats.unusedRangeCount == 1);
4041 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
4042 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004043 }
4044
4045 // Free all remaining items.
4046 for(size_t i = items.size(); i--; )
4047 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4048 items.clear();
4049
4050 // Allocate maximum items again.
4051 for(size_t i = 0; i < BUF_COUNT; ++i)
4052 {
4053 BufItem item;
4054 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004055 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004056 items.push_back(item);
4057 }
4058
4059 // Delete every other item.
4060 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
4061 {
4062 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4063 items.erase(items.begin() + i);
4064 }
4065
4066 // Defragment!
4067 {
4068 std::vector<VmaAllocation> allocationsToDefragment(items.size());
4069 for(size_t i = 0; i < items.size(); ++i)
4070 allocationsToDefragment[i] = items[i].Alloc;
4071
4072 VmaDefragmentationStats defragmentationStats;
4073 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004074 TEST(res == VK_SUCCESS);
4075 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004076 }
4077
4078 // Free all remaining items.
4079 for(size_t i = items.size(); i--; )
4080 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4081 items.clear();
4082
4083 ////////////////////////////////////////////////////////////////////////////////
4084 // Test for vmaMakePoolAllocationsLost
4085
4086 // Allocate 4 buffers on frame 10.
4087 vmaSetCurrentFrameIndex(g_hAllocator, 10);
4088 for(size_t i = 0; i < 4; ++i)
4089 {
4090 BufItem item;
4091 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004092 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004093 items.push_back(item);
4094 }
4095
4096 // Touch first 2 of them on frame 11.
4097 vmaSetCurrentFrameIndex(g_hAllocator, 11);
4098 for(size_t i = 0; i < 2; ++i)
4099 {
4100 VmaAllocationInfo allocInfo;
4101 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
4102 }
4103
4104 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
4105 size_t lostCount = 0xDEADC0DE;
4106 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004107 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004108
4109 // Make another call. Now 0 should be lost.
4110 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004111 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004112
4113 // Make another call, with null count. Should not crash.
4114 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
4115
4116 // END: Free all remaining items.
4117 for(size_t i = items.size(); i--; )
4118 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4119
4120 items.clear();
4121
Adam Sawickid2924172018-06-11 12:48:46 +02004122 ////////////////////////////////////////////////////////////////////////////////
4123 // Test for allocation too large for pool
4124
4125 {
4126 VmaAllocationCreateInfo allocCreateInfo = {};
4127 allocCreateInfo.pool = pool;
4128
4129 VkMemoryRequirements memReq;
4130 memReq.memoryTypeBits = UINT32_MAX;
4131 memReq.alignment = 1;
4132 memReq.size = poolCreateInfo.blockSize + 4;
4133
4134 VmaAllocation alloc = nullptr;
4135 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004136 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02004137 }
4138
Adam Sawickib8333fb2018-03-13 16:15:53 +01004139 vmaDestroyPool(g_hAllocator, pool);
4140}
4141
// Returns true if every byte of the `size`-byte region at `pMemory`
// equals `pattern`. Used to verify VMA's debug fill patterns
// (0xDC on allocation, 0xEF on free).
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* p = (const uint8_t*)pMemory;
    const uint8_t* const pEnd = p + size;
    while(p != pEnd)
    {
        if(*p++ != pattern)
            return false;
    }
    return true;
}
4154
4155static void TestAllocationsInitialization()
4156{
4157 VkResult res;
4158
4159 const size_t BUF_SIZE = 1024;
4160
4161 // Create pool.
4162
4163 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4164 bufInfo.size = BUF_SIZE;
4165 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4166
4167 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
4168 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4169
4170 VmaPoolCreateInfo poolCreateInfo = {};
4171 poolCreateInfo.blockSize = BUF_SIZE * 10;
4172 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
4173 poolCreateInfo.maxBlockCount = 1;
4174 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004175 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004176
4177 VmaAllocationCreateInfo bufAllocCreateInfo = {};
4178 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004179 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004180
4181 // Create one persistently mapped buffer to keep memory of this block mapped,
4182 // so that pointer to mapped data will remain (more or less...) valid even
4183 // after destruction of other allocations.
4184
4185 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4186 VkBuffer firstBuf;
4187 VmaAllocation firstAlloc;
4188 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004189 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004190
4191 // Test buffers.
4192
4193 for(uint32_t i = 0; i < 2; ++i)
4194 {
4195 const bool persistentlyMapped = i == 0;
4196 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
4197 VkBuffer buf;
4198 VmaAllocation alloc;
4199 VmaAllocationInfo allocInfo;
4200 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004201 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004202
4203 void* pMappedData;
4204 if(!persistentlyMapped)
4205 {
4206 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004207 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004208 }
4209 else
4210 {
4211 pMappedData = allocInfo.pMappedData;
4212 }
4213
4214 // Validate initialized content
4215 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004216 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02004217
4218 if(!persistentlyMapped)
4219 {
4220 vmaUnmapMemory(g_hAllocator, alloc);
4221 }
4222
4223 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4224
4225 // Validate freed content
4226 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004227 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02004228 }
4229
4230 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
4231 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
4232}
4233
Adam Sawickib8333fb2018-03-13 16:15:53 +01004234static void TestPool_Benchmark(
4235 PoolTestResult& outResult,
4236 const PoolTestConfig& config)
4237{
Adam Sawickib8d34d52018-10-03 17:41:20 +02004238 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004239
4240 RandomNumberGenerator mainRand{config.RandSeed};
4241
4242 uint32_t allocationSizeProbabilitySum = std::accumulate(
4243 config.AllocationSizes.begin(),
4244 config.AllocationSizes.end(),
4245 0u,
4246 [](uint32_t sum, const AllocationSize& allocSize) {
4247 return sum + allocSize.Probability;
4248 });
4249
4250 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4251 bufferInfo.size = 256; // Whatever.
4252 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4253
4254 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4255 imageInfo.imageType = VK_IMAGE_TYPE_2D;
4256 imageInfo.extent.width = 256; // Whatever.
4257 imageInfo.extent.height = 256; // Whatever.
4258 imageInfo.extent.depth = 1;
4259 imageInfo.mipLevels = 1;
4260 imageInfo.arrayLayers = 1;
4261 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4262 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
4263 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
4264 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
4265 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4266
4267 uint32_t bufferMemoryTypeBits = UINT32_MAX;
4268 {
4269 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02004270 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004271 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004272
4273 VkMemoryRequirements memReq;
4274 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
4275 bufferMemoryTypeBits = memReq.memoryTypeBits;
4276
Adam Sawicki1f84f622019-07-02 13:40:01 +02004277 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004278 }
4279
4280 uint32_t imageMemoryTypeBits = UINT32_MAX;
4281 {
4282 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02004283 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004284 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004285
4286 VkMemoryRequirements memReq;
4287 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
4288 imageMemoryTypeBits = memReq.memoryTypeBits;
4289
Adam Sawicki1f84f622019-07-02 13:40:01 +02004290 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004291 }
4292
4293 uint32_t memoryTypeBits = 0;
4294 if(config.UsesBuffers() && config.UsesImages())
4295 {
4296 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
4297 if(memoryTypeBits == 0)
4298 {
4299 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
4300 return;
4301 }
4302 }
4303 else if(config.UsesBuffers())
4304 memoryTypeBits = bufferMemoryTypeBits;
4305 else if(config.UsesImages())
4306 memoryTypeBits = imageMemoryTypeBits;
4307 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004308 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004309
4310 VmaPoolCreateInfo poolCreateInfo = {};
4311 poolCreateInfo.memoryTypeIndex = 0;
4312 poolCreateInfo.minBlockCount = 1;
4313 poolCreateInfo.maxBlockCount = 1;
4314 poolCreateInfo.blockSize = config.PoolSize;
4315 poolCreateInfo.frameInUseCount = 1;
4316
4317 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
4318 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4319 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4320
4321 VmaPool pool;
4322 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004323 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004324
4325 // Start time measurement - after creating pool and initializing data structures.
4326 time_point timeBeg = std::chrono::high_resolution_clock::now();
4327
4328 ////////////////////////////////////////////////////////////////////////////////
4329 // ThreadProc
4330 auto ThreadProc = [&](
4331 PoolTestThreadResult* outThreadResult,
4332 uint32_t randSeed,
4333 HANDLE frameStartEvent,
4334 HANDLE frameEndEvent) -> void
4335 {
4336 RandomNumberGenerator threadRand{randSeed};
4337
4338 outThreadResult->AllocationTimeMin = duration::max();
4339 outThreadResult->AllocationTimeSum = duration::zero();
4340 outThreadResult->AllocationTimeMax = duration::min();
4341 outThreadResult->DeallocationTimeMin = duration::max();
4342 outThreadResult->DeallocationTimeSum = duration::zero();
4343 outThreadResult->DeallocationTimeMax = duration::min();
4344 outThreadResult->AllocationCount = 0;
4345 outThreadResult->DeallocationCount = 0;
4346 outThreadResult->LostAllocationCount = 0;
4347 outThreadResult->LostAllocationTotalSize = 0;
4348 outThreadResult->FailedAllocationCount = 0;
4349 outThreadResult->FailedAllocationTotalSize = 0;
4350
4351 struct Item
4352 {
4353 VkDeviceSize BufferSize;
4354 VkExtent2D ImageSize;
4355 VkBuffer Buf;
4356 VkImage Image;
4357 VmaAllocation Alloc;
4358
4359 VkDeviceSize CalcSizeBytes() const
4360 {
4361 return BufferSize +
4362 ImageSize.width * ImageSize.height * 4;
4363 }
4364 };
4365 std::vector<Item> unusedItems, usedItems;
4366
4367 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
4368
4369 // Create all items - all unused, not yet allocated.
4370 for(size_t i = 0; i < threadTotalItemCount; ++i)
4371 {
4372 Item item = {};
4373
4374 uint32_t allocSizeIndex = 0;
4375 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
4376 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
4377 r -= config.AllocationSizes[allocSizeIndex++].Probability;
4378
4379 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
4380 if(allocSize.BufferSizeMax > 0)
4381 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004382 TEST(allocSize.BufferSizeMin > 0);
4383 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004384 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
4385 item.BufferSize = allocSize.BufferSizeMin;
4386 else
4387 {
4388 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
4389 item.BufferSize = item.BufferSize / 16 * 16;
4390 }
4391 }
4392 else
4393 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004394 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004395 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
4396 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
4397 else
4398 {
4399 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
4400 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
4401 }
4402 }
4403
4404 unusedItems.push_back(item);
4405 }
4406
4407 auto Allocate = [&](Item& item) -> VkResult
4408 {
4409 VmaAllocationCreateInfo allocCreateInfo = {};
4410 allocCreateInfo.pool = pool;
4411 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
4412 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4413
4414 if(item.BufferSize)
4415 {
4416 bufferInfo.size = item.BufferSize;
4417 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4418 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
4419 }
4420 else
4421 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004422 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004423
4424 imageInfo.extent.width = item.ImageSize.width;
4425 imageInfo.extent.height = item.ImageSize.height;
4426 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4427 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
4428 }
4429 };
4430
4431 ////////////////////////////////////////////////////////////////////////////////
4432 // Frames
4433 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
4434 {
4435 WaitForSingleObject(frameStartEvent, INFINITE);
4436
4437 // Always make some percent of used bufs unused, to choose different used ones.
4438 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
4439 for(size_t i = 0; i < bufsToMakeUnused; ++i)
4440 {
4441 size_t index = threadRand.Generate() % usedItems.size();
4442 unusedItems.push_back(usedItems[index]);
4443 usedItems.erase(usedItems.begin() + index);
4444 }
4445
4446 // Determine which bufs we want to use in this frame.
4447 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
4448 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004449 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01004450 // Move some used to unused.
4451 while(usedBufCount < usedItems.size())
4452 {
4453 size_t index = threadRand.Generate() % usedItems.size();
4454 unusedItems.push_back(usedItems[index]);
4455 usedItems.erase(usedItems.begin() + index);
4456 }
4457 // Move some unused to used.
4458 while(usedBufCount > usedItems.size())
4459 {
4460 size_t index = threadRand.Generate() % unusedItems.size();
4461 usedItems.push_back(unusedItems[index]);
4462 unusedItems.erase(unusedItems.begin() + index);
4463 }
4464
4465 uint32_t touchExistingCount = 0;
4466 uint32_t touchLostCount = 0;
4467 uint32_t createSucceededCount = 0;
4468 uint32_t createFailedCount = 0;
4469
4470 // Touch all used bufs. If not created or lost, allocate.
4471 for(size_t i = 0; i < usedItems.size(); ++i)
4472 {
4473 Item& item = usedItems[i];
4474 // Not yet created.
4475 if(item.Alloc == VK_NULL_HANDLE)
4476 {
4477 res = Allocate(item);
4478 ++outThreadResult->AllocationCount;
4479 if(res != VK_SUCCESS)
4480 {
4481 item.Alloc = VK_NULL_HANDLE;
4482 item.Buf = VK_NULL_HANDLE;
4483 ++outThreadResult->FailedAllocationCount;
4484 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
4485 ++createFailedCount;
4486 }
4487 else
4488 ++createSucceededCount;
4489 }
4490 else
4491 {
4492 // Touch.
4493 VmaAllocationInfo allocInfo;
4494 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
4495 // Lost.
4496 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
4497 {
4498 ++touchLostCount;
4499
4500 // Destroy.
4501 {
4502 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4503 if(item.Buf)
4504 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
4505 else
4506 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
4507 ++outThreadResult->DeallocationCount;
4508 }
4509 item.Alloc = VK_NULL_HANDLE;
4510 item.Buf = VK_NULL_HANDLE;
4511
4512 ++outThreadResult->LostAllocationCount;
4513 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
4514
4515 // Recreate.
4516 res = Allocate(item);
4517 ++outThreadResult->AllocationCount;
4518 // Creation failed.
4519 if(res != VK_SUCCESS)
4520 {
4521 ++outThreadResult->FailedAllocationCount;
4522 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
4523 ++createFailedCount;
4524 }
4525 else
4526 ++createSucceededCount;
4527 }
4528 else
4529 ++touchExistingCount;
4530 }
4531 }
4532
4533 /*
4534 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
4535 randSeed, frameIndex,
4536 touchExistingCount, touchLostCount,
4537 createSucceededCount, createFailedCount);
4538 */
4539
4540 SetEvent(frameEndEvent);
4541 }
4542
4543 // Free all remaining items.
4544 for(size_t i = usedItems.size(); i--; )
4545 {
4546 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
4547 if(usedItems[i].Buf)
4548 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
4549 else
4550 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
4551 ++outThreadResult->DeallocationCount;
4552 }
4553 for(size_t i = unusedItems.size(); i--; )
4554 {
4555 PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
4556 if(unusedItems[i].Buf)
4557 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
4558 else
4559 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
4560 ++outThreadResult->DeallocationCount;
4561 }
4562 };
4563
4564 // Launch threads.
4565 uint32_t threadRandSeed = mainRand.Generate();
4566 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
4567 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
4568 std::vector<std::thread> bkgThreads;
4569 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
4570 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4571 {
4572 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
4573 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
4574 bkgThreads.emplace_back(std::bind(
4575 ThreadProc,
4576 &threadResults[threadIndex],
4577 threadRandSeed + threadIndex,
4578 frameStartEvents[threadIndex],
4579 frameEndEvents[threadIndex]));
4580 }
4581
4582 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02004583 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004584 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
4585 {
4586 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
4587 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4588 SetEvent(frameStartEvents[threadIndex]);
4589 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
4590 }
4591
4592 // Wait for threads finished
4593 for(size_t i = 0; i < bkgThreads.size(); ++i)
4594 {
4595 bkgThreads[i].join();
4596 CloseHandle(frameEndEvents[i]);
4597 CloseHandle(frameStartEvents[i]);
4598 }
4599 bkgThreads.clear();
4600
4601 // Finish time measurement - before destroying pool.
4602 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
4603
4604 vmaDestroyPool(g_hAllocator, pool);
4605
4606 outResult.AllocationTimeMin = duration::max();
4607 outResult.AllocationTimeAvg = duration::zero();
4608 outResult.AllocationTimeMax = duration::min();
4609 outResult.DeallocationTimeMin = duration::max();
4610 outResult.DeallocationTimeAvg = duration::zero();
4611 outResult.DeallocationTimeMax = duration::min();
4612 outResult.LostAllocationCount = 0;
4613 outResult.LostAllocationTotalSize = 0;
4614 outResult.FailedAllocationCount = 0;
4615 outResult.FailedAllocationTotalSize = 0;
4616 size_t allocationCount = 0;
4617 size_t deallocationCount = 0;
4618 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
4619 {
4620 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
4621 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
4622 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
4623 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
4624 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
4625 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
4626 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
4627 allocationCount += threadResult.AllocationCount;
4628 deallocationCount += threadResult.DeallocationCount;
4629 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
4630 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
4631 outResult.LostAllocationCount += threadResult.LostAllocationCount;
4632 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
4633 }
4634 if(allocationCount)
4635 outResult.AllocationTimeAvg /= allocationCount;
4636 if(deallocationCount)
4637 outResult.DeallocationTimeAvg /= deallocationCount;
4638}
4639
// Returns true if the half-open byte ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) overlap. Identical start addresses always count as
// overlapping, even when both sizes are zero (matches original semantics).
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Otherwise the lower region must extend past the start of the higher one.
    return ptr1 < ptr2 ?
        ptr1 + size1 > ptr2 :
        ptr2 + size2 > ptr1;
}
4649
Adam Sawickiefa88c42019-11-18 16:33:56 +01004650static void TestMemoryUsage()
4651{
4652 wprintf(L"Testing memory usage:\n");
4653
Adam Sawicki69185552019-11-18 17:03:34 +01004654 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004655 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
4656 {
4657 switch(usage)
4658 {
4659 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
4660 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
4661 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
4662 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
4663 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
4664 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01004665 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004666 default: assert(0);
4667 }
4668
4669 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
4670 {
4671 if(res == VK_SUCCESS)
4672 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
4673 else
4674 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
4675 };
4676
4677 // 1: Buffer for copy
4678 {
4679 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4680 bufCreateInfo.size = 65536;
4681 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4682
4683 VkBuffer buf = VK_NULL_HANDLE;
4684 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4685 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4686
4687 VkMemoryRequirements memReq = {};
4688 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4689
4690 VmaAllocationCreateInfo allocCreateInfo = {};
4691 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4692 VmaAllocation alloc = VK_NULL_HANDLE;
4693 VmaAllocationInfo allocInfo = {};
4694 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4695 if(res == VK_SUCCESS)
4696 {
4697 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4698 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4699 TEST(res == VK_SUCCESS);
4700 }
4701 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4702 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4703 }
4704
4705 // 2: Vertex buffer
4706 {
4707 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4708 bufCreateInfo.size = 65536;
4709 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4710
4711 VkBuffer buf = VK_NULL_HANDLE;
4712 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4713 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4714
4715 VkMemoryRequirements memReq = {};
4716 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4717
4718 VmaAllocationCreateInfo allocCreateInfo = {};
4719 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4720 VmaAllocation alloc = VK_NULL_HANDLE;
4721 VmaAllocationInfo allocInfo = {};
4722 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4723 if(res == VK_SUCCESS)
4724 {
4725 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4726 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4727 TEST(res == VK_SUCCESS);
4728 }
4729 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
4730 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4731 }
4732
4733 // 3: Image for copy, OPTIMAL
4734 {
4735 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4736 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4737 imgCreateInfo.extent.width = 256;
4738 imgCreateInfo.extent.height = 256;
4739 imgCreateInfo.extent.depth = 1;
4740 imgCreateInfo.mipLevels = 1;
4741 imgCreateInfo.arrayLayers = 1;
4742 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4743 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4744 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4745 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
4746 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4747
4748 VkImage img = VK_NULL_HANDLE;
4749 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4750 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4751
4752 VkMemoryRequirements memReq = {};
4753 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4754
4755 VmaAllocationCreateInfo allocCreateInfo = {};
4756 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4757 VmaAllocation alloc = VK_NULL_HANDLE;
4758 VmaAllocationInfo allocInfo = {};
4759 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4760 if(res == VK_SUCCESS)
4761 {
4762 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4763 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4764 TEST(res == VK_SUCCESS);
4765 }
4766 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4767
4768 vmaDestroyImage(g_hAllocator, img, alloc);
4769 }
4770
4771 // 4: Image SAMPLED, OPTIMAL
4772 {
4773 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4774 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4775 imgCreateInfo.extent.width = 256;
4776 imgCreateInfo.extent.height = 256;
4777 imgCreateInfo.extent.depth = 1;
4778 imgCreateInfo.mipLevels = 1;
4779 imgCreateInfo.arrayLayers = 1;
4780 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4781 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4782 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4783 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4784 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4785
4786 VkImage img = VK_NULL_HANDLE;
4787 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4788 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4789
4790 VkMemoryRequirements memReq = {};
4791 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4792
4793 VmaAllocationCreateInfo allocCreateInfo = {};
4794 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4795 VmaAllocation alloc = VK_NULL_HANDLE;
4796 VmaAllocationInfo allocInfo = {};
4797 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4798 if(res == VK_SUCCESS)
4799 {
4800 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4801 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4802 TEST(res == VK_SUCCESS);
4803 }
4804 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4805 vmaDestroyImage(g_hAllocator, img, alloc);
4806 }
4807
4808 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4809 {
4810 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4811 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4812 imgCreateInfo.extent.width = 256;
4813 imgCreateInfo.extent.height = 256;
4814 imgCreateInfo.extent.depth = 1;
4815 imgCreateInfo.mipLevels = 1;
4816 imgCreateInfo.arrayLayers = 1;
4817 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4818 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4819 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4820 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4821 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4822
4823 VkImage img = VK_NULL_HANDLE;
4824 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4825 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4826
4827 VkMemoryRequirements memReq = {};
4828 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4829
4830 VmaAllocationCreateInfo allocCreateInfo = {};
4831 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4832 VmaAllocation alloc = VK_NULL_HANDLE;
4833 VmaAllocationInfo allocInfo = {};
4834 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4835 if(res == VK_SUCCESS)
4836 {
4837 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4838 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4839 TEST(res == VK_SUCCESS);
4840 }
4841 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4842 vmaDestroyImage(g_hAllocator, img, alloc);
4843 }
4844 }
4845}
4846
Adam Sawicki50882502020-02-07 16:51:31 +01004847static uint32_t FindDeviceCoherentMemoryTypeBits()
4848{
4849 VkPhysicalDeviceMemoryProperties memProps;
4850 vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);
4851
4852 uint32_t memTypeBits = 0;
4853 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
4854 {
4855 if(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
4856 memTypeBits |= 1u << i;
4857 }
4858 return memTypeBits;
4859}
4860
4861static void TestDeviceCoherentMemory()
4862{
4863 if(!VK_AMD_device_coherent_memory_enabled)
4864 return;
4865
4866 uint32_t deviceCoherentMemoryTypeBits = FindDeviceCoherentMemoryTypeBits();
4867 // Extension is enabled, feature is enabled, and the device still doesn't support any such memory type?
4868 // OK then, so it's just fake!
4869 if(deviceCoherentMemoryTypeBits == 0)
4870 return;
4871
4872 wprintf(L"Testing device coherent memory...\n");
4873
4874 // 1. Try to allocate buffer from a memory type that is DEVICE_COHERENT.
4875
4876 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4877 bufCreateInfo.size = 0x10000;
4878 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4879
4880 VmaAllocationCreateInfo allocCreateInfo = {};
4881 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4882 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD;
4883
4884 AllocInfo alloc = {};
4885 VmaAllocationInfo allocInfo = {};
4886 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4887
4888 // Make sure it succeeded and was really created in such memory type.
4889 TEST(res == VK_SUCCESS);
4890 TEST((1u << allocInfo.memoryType) & deviceCoherentMemoryTypeBits);
4891
4892 alloc.Destroy();
4893
4894 // 2. Try to create a pool in such memory type.
4895 {
4896 VmaPoolCreateInfo poolCreateInfo = {};
4897
4898 res = vmaFindMemoryTypeIndex(g_hAllocator, UINT32_MAX, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4899 TEST(res == VK_SUCCESS);
4900 TEST((1u << poolCreateInfo.memoryTypeIndex) & deviceCoherentMemoryTypeBits);
4901
4902 VmaPool pool = VK_NULL_HANDLE;
4903 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4904 TEST(res == VK_SUCCESS);
4905
4906 vmaDestroyPool(g_hAllocator, pool);
4907 }
4908
4909 // 3. Try the same with a local allocator created without VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT.
4910
4911 VmaAllocatorCreateInfo allocatorCreateInfo = {};
4912 SetAllocatorCreateInfo(allocatorCreateInfo);
4913 allocatorCreateInfo.flags &= ~VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
4914
4915 VmaAllocator localAllocator = VK_NULL_HANDLE;
4916 res = vmaCreateAllocator(&allocatorCreateInfo, &localAllocator);
4917 TEST(res == VK_SUCCESS && localAllocator);
4918
4919 res = vmaCreateBuffer(localAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4920
4921 // Make sure it failed.
4922 TEST(res != VK_SUCCESS && !alloc.m_Buffer && !alloc.m_Allocation);
4923
4924 // 4. Try to find memory type.
4925 {
4926 uint32_t memTypeIndex = UINT_MAX;
4927 res = vmaFindMemoryTypeIndex(localAllocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
4928 TEST(res != VK_SUCCESS);
4929 }
4930
4931 vmaDestroyAllocator(localAllocator);
4932}
4933
Adam Sawicki40ffe982019-10-11 15:56:02 +02004934static void TestBudget()
4935{
4936 wprintf(L"Testing budget...\n");
4937
Adam Sawicki353e3672019-11-02 14:12:05 +01004938 static const VkDeviceSize BUF_SIZE = 100ull * 1024 * 1024;
4939 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004940
4941 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
4942 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004943 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
4944
4945 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
4946 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004947
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01004948 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4949 {
4950 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
4951 }
4952
Adam Sawicki40ffe982019-10-11 15:56:02 +02004953 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4954 bufInfo.size = BUF_SIZE;
4955 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4956
4957 VmaAllocationCreateInfo allocCreateInfo = {};
4958 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4959 if(testIndex == 0)
4960 {
4961 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4962 }
4963
4964 // CREATE BUFFERS
4965 uint32_t heapIndex = 0;
4966 BufferInfo bufInfos[BUF_COUNT] = {};
4967 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
4968 {
4969 VmaAllocationInfo allocInfo;
4970 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4971 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
4972 TEST(res == VK_SUCCESS);
4973 if(bufIndex == 0)
4974 {
4975 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
4976 }
4977 else
4978 {
4979 // All buffers need to fall into the same heap.
4980 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
4981 }
4982 }
4983
Adam Sawicki353e3672019-11-02 14:12:05 +01004984 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
4985 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004986
4987 // DESTROY BUFFERS
4988 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
4989 {
4990 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
4991 }
4992
Adam Sawicki353e3672019-11-02 14:12:05 +01004993 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
4994 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004995
4996 // CHECK
4997 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4998 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004999 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005000 if(i == heapIndex)
5001 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005002 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
5003 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
5004 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005005 }
5006 else
5007 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005008 TEST(budgetEnd[i].allocationBytes == budgetEnd[i].allocationBytes &&
5009 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
5010 TEST(budgetEnd[i].blockBytes == budgetEnd[i].blockBytes &&
5011 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005012 }
5013 }
5014 }
5015}
5016
// Tests vmaMapMemory()/vmaUnmapMemory() reference counting and persistently
// mapped allocations (VMA_ALLOCATION_CREATE_MAPPED_BIT), for three allocation
// variants: default, from a custom pool, and dedicated.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type discovered on the first iteration; the TEST_POOL iteration
    // needs it to create a pool in the same memory type.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Relies on TEST_NORMAL having run first and filled memTypeIndex.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // Must be HOST_VISIBLE to allow mapping.
        allocCreateInfo.pool = pool; // Null except in the TEST_POOL iteration.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so must start unmapped.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte - verifies the whole range is mapped.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time. Must be reference-counted: same pointer back.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1. Its mapping must not overlap buffer 0's.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times - once per vmaMapMemory() call above.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2. Manual map on a persistently mapped allocation must
        // return the same pointer as pMappedData.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2. The persistent mapping must survive the manual unmap.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5121
Adam Sawickidaa6a552019-06-25 15:26:37 +02005122// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
5123static void TestDeviceLocalMapped()
5124{
5125 VkResult res;
5126
5127 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
5128 {
5129 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5130 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5131 bufCreateInfo.size = 4096;
5132
5133 VmaPool pool = VK_NULL_HANDLE;
5134 VmaAllocationCreateInfo allocCreateInfo = {};
5135 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5136 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5137 if(testIndex == 2)
5138 {
5139 VmaPoolCreateInfo poolCreateInfo = {};
5140 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
5141 TEST(res == VK_SUCCESS);
5142 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
5143 TEST(res == VK_SUCCESS);
5144 allocCreateInfo.pool = pool;
5145 }
5146 else if(testIndex == 1)
5147 {
5148 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
5149 }
5150
5151 VkBuffer buf = VK_NULL_HANDLE;
5152 VmaAllocation alloc = VK_NULL_HANDLE;
5153 VmaAllocationInfo allocInfo = {};
5154 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
5155 TEST(res == VK_SUCCESS && alloc);
5156
5157 VkMemoryPropertyFlags memTypeFlags = 0;
5158 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
5159 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
5160 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
5161
5162 vmaDestroyBuffer(g_hAllocator, buf, alloc);
5163 vmaDestroyPool(g_hAllocator, pool);
5164 }
5165}
5166
// Stress-tests vmaMapMemory()/vmaUnmapMemory() from 16 concurrent threads,
// each creating, randomly mapping/unmapping, and destroying buffers, for
// three allocation variants: default, custom pool, dedicated.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Shared across threads: first thread to allocate publishes the memory type
    // for the TEST_POOL iteration. volatile, not atomic - a benign race: all
    // writers store the same value. NOTE(review): atomic would be stricter.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Relies on TEST_NORMAL having run first and filled memTypeIndex.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // HOST_VISIBLE, so mapping is possible.
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Capture by copy except memTypeIndex, which is shared by reference.
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Seeded per thread so each thread gets a distinct but reproducible sequence.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    // Publish memory type for the later TEST_POOL iteration.
                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Second map must be reference-counted: same pointer back.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        // First unmap: MAP_FOR_MOMENT drops to 0 refs (unmapped),
                        // MAP_TWO_TIMES drops to 1 (still mapped at the same address).
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Random delay to interleave thread schedules.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Tear down in reverse order; buffers still mapped get their final unmap here.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5329
// Writes the CSV header row for main-test results.
// Column order must stay in sync with WriteMainTestResult().
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    fputs(HEADER, file);
}
5346
5347static void WriteMainTestResult(
5348 FILE* file,
5349 const char* codeDescription,
5350 const char* testDescription,
5351 const Config& config, const Result& result)
5352{
5353 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5354 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5355 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5356 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5357 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5358 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5359 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5360
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005361 std::string currTime;
5362 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005363
5364 fprintf(file,
5365 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01005366 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
5367 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005368 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02005369 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01005370 totalTimeSeconds * 1e6f,
5371 allocationTimeMinSeconds * 1e6f,
5372 allocationTimeAvgSeconds * 1e6f,
5373 allocationTimeMaxSeconds * 1e6f,
5374 deallocationTimeMinSeconds * 1e6f,
5375 deallocationTimeAvgSeconds * 1e6f,
5376 deallocationTimeMaxSeconds * 1e6f,
5377 result.TotalMemoryAllocated,
5378 result.FreeRangeSizeAvg,
5379 result.FreeRangeSizeMax);
5380}
5381
// Writes the CSV header row for pool-test results.
// Column order must stay in sync with WritePoolTestResult().
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fputs(HEADER, file);
}
5399
5400static void WritePoolTestResult(
5401 FILE* file,
5402 const char* codeDescription,
5403 const char* testDescription,
5404 const PoolTestConfig& config,
5405 const PoolTestResult& result)
5406{
5407 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5408 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5409 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5410 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5411 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5412 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5413 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5414
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005415 std::string currTime;
5416 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005417
5418 fprintf(file,
5419 "%s,%s,%s,"
5420 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
5421 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
5422 // General
5423 codeDescription,
5424 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005425 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01005426 // Config
5427 config.ThreadCount,
5428 (unsigned long long)config.PoolSize,
5429 config.FrameCount,
5430 config.TotalItemCount,
5431 config.UsedItemCountMin,
5432 config.UsedItemCountMax,
5433 config.ItemsToMakeUnusedPercent,
5434 // Results
5435 totalTimeSeconds * 1e6f,
5436 allocationTimeMinSeconds * 1e6f,
5437 allocationTimeAvgSeconds * 1e6f,
5438 allocationTimeMaxSeconds * 1e6f,
5439 deallocationTimeMinSeconds * 1e6f,
5440 deallocationTimeAvgSeconds * 1e6f,
5441 deallocationTimeMaxSeconds * 1e6f,
5442 result.LostAllocationCount,
5443 result.LostAllocationTotalSize,
5444 result.FailedAllocationCount,
5445 result.FailedAllocationTotalSize);
5446}
5447
5448static void PerformCustomMainTest(FILE* file)
5449{
5450 Config config{};
5451 config.RandSeed = 65735476;
5452 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
5453 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
5454 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
5455 config.FreeOrder = FREE_ORDER::FORWARD;
5456 config.ThreadCount = 16;
5457 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02005458 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005459
5460 // Buffers
5461 //config.AllocationSizes.push_back({4, 16, 1024});
5462 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5463
5464 // Images
5465 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5466 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5467
5468 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
5469 config.AdditionalOperationCount = 1024;
5470
5471 Result result{};
5472 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005473 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005474 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
5475}
5476
5477static void PerformCustomPoolTest(FILE* file)
5478{
5479 PoolTestConfig config;
5480 config.PoolSize = 100 * 1024 * 1024;
5481 config.RandSeed = 2345764;
5482 config.ThreadCount = 1;
5483 config.FrameCount = 200;
5484 config.ItemsToMakeUnusedPercent = 2;
5485
5486 AllocationSize allocSize = {};
5487 allocSize.BufferSizeMin = 1024;
5488 allocSize.BufferSizeMax = 1024 * 1024;
5489 allocSize.Probability = 1;
5490 config.AllocationSizes.push_back(allocSize);
5491
5492 allocSize.BufferSizeMin = 0;
5493 allocSize.BufferSizeMax = 0;
5494 allocSize.ImageSizeMin = 128;
5495 allocSize.ImageSizeMax = 1024;
5496 allocSize.Probability = 1;
5497 config.AllocationSizes.push_back(allocSize);
5498
5499 config.PoolSize = config.CalcAvgResourceSize() * 200;
5500 config.UsedItemCountMax = 160;
5501 config.TotalItemCount = config.UsedItemCountMax * 10;
5502 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5503
5504 g_MemoryAliasingWarningEnabled = false;
5505 PoolTestResult result = {};
5506 TestPool_Benchmark(result, config);
5507 g_MemoryAliasingWarningEnabled = true;
5508
5509 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
5510}
5511
Adam Sawickib8333fb2018-03-13 16:15:53 +01005512static void PerformMainTests(FILE* file)
5513{
5514 uint32_t repeatCount = 1;
5515 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
5516
5517 Config config{};
5518 config.RandSeed = 65735476;
5519 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
5520 config.FreeOrder = FREE_ORDER::FORWARD;
5521
5522 size_t threadCountCount = 1;
5523 switch(ConfigType)
5524 {
5525 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
5526 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
5527 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
5528 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
5529 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
5530 default: assert(0);
5531 }
Adam Sawicki0667e332018-08-24 17:26:44 +02005532
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005533 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02005534
Adam Sawickib8333fb2018-03-13 16:15:53 +01005535 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
5536 {
5537 std::string desc1;
5538
5539 switch(threadCountIndex)
5540 {
5541 case 0:
5542 desc1 += "1_thread";
5543 config.ThreadCount = 1;
5544 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5545 break;
5546 case 1:
5547 desc1 += "16_threads+0%_common";
5548 config.ThreadCount = 16;
5549 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5550 break;
5551 case 2:
5552 desc1 += "16_threads+50%_common";
5553 config.ThreadCount = 16;
5554 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
5555 break;
5556 case 3:
5557 desc1 += "16_threads+100%_common";
5558 config.ThreadCount = 16;
5559 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
5560 break;
5561 case 4:
5562 desc1 += "2_threads+0%_common";
5563 config.ThreadCount = 2;
5564 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5565 break;
5566 case 5:
5567 desc1 += "2_threads+50%_common";
5568 config.ThreadCount = 2;
5569 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
5570 break;
5571 case 6:
5572 desc1 += "2_threads+100%_common";
5573 config.ThreadCount = 2;
5574 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
5575 break;
5576 default:
5577 assert(0);
5578 }
5579
5580 // 0 = buffers, 1 = images, 2 = buffers and images
5581 size_t buffersVsImagesCount = 2;
5582 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
5583 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
5584 {
5585 std::string desc2 = desc1;
5586 switch(buffersVsImagesIndex)
5587 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005588 case 0: desc2 += ",Buffers"; break;
5589 case 1: desc2 += ",Images"; break;
5590 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005591 default: assert(0);
5592 }
5593
5594 // 0 = small, 1 = large, 2 = small and large
5595 size_t smallVsLargeCount = 2;
5596 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
5597 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
5598 {
5599 std::string desc3 = desc2;
5600 switch(smallVsLargeIndex)
5601 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005602 case 0: desc3 += ",Small"; break;
5603 case 1: desc3 += ",Large"; break;
5604 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005605 default: assert(0);
5606 }
5607
5608 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5609 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
5610 else
5611 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
5612
5613 // 0 = varying sizes min...max, 1 = set of constant sizes
5614 size_t constantSizesCount = 1;
5615 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
5616 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
5617 {
5618 std::string desc4 = desc3;
5619 switch(constantSizesIndex)
5620 {
5621 case 0: desc4 += " Varying_sizes"; break;
5622 case 1: desc4 += " Constant_sizes"; break;
5623 default: assert(0);
5624 }
5625
5626 config.AllocationSizes.clear();
5627 // Buffers present
5628 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5629 {
5630 // Small
5631 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5632 {
5633 // Varying size
5634 if(constantSizesIndex == 0)
5635 config.AllocationSizes.push_back({4, 16, 1024});
5636 // Constant sizes
5637 else
5638 {
5639 config.AllocationSizes.push_back({1, 16, 16});
5640 config.AllocationSizes.push_back({1, 64, 64});
5641 config.AllocationSizes.push_back({1, 256, 256});
5642 config.AllocationSizes.push_back({1, 1024, 1024});
5643 }
5644 }
5645 // Large
5646 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5647 {
5648 // Varying size
5649 if(constantSizesIndex == 0)
5650 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5651 // Constant sizes
5652 else
5653 {
5654 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5655 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5656 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5657 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5658 }
5659 }
5660 }
5661 // Images present
5662 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5663 {
5664 // Small
5665 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5666 {
5667 // Varying size
5668 if(constantSizesIndex == 0)
5669 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5670 // Constant sizes
5671 else
5672 {
5673 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5674 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5675 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5676 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5677 }
5678 }
5679 // Large
5680 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5681 {
5682 // Varying size
5683 if(constantSizesIndex == 0)
5684 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5685 // Constant sizes
5686 else
5687 {
5688 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
5689 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
5690 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
5691 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
5692 }
5693 }
5694 }
5695
5696 // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
5697 size_t beginBytesToAllocateCount = 1;
5698 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
5699 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
5700 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
5701 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
5702 {
5703 std::string desc5 = desc4;
5704
5705 switch(beginBytesToAllocateIndex)
5706 {
5707 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005708 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005709 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
5710 config.AdditionalOperationCount = 0;
5711 break;
5712 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005713 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005714 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
5715 config.AdditionalOperationCount = 1024;
5716 break;
5717 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005718 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005719 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
5720 config.AdditionalOperationCount = 1024;
5721 break;
5722 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005723 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005724 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
5725 config.AdditionalOperationCount = 1024;
5726 break;
5727 default:
5728 assert(0);
5729 }
5730
Adam Sawicki0667e332018-08-24 17:26:44 +02005731 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01005732 {
Adam Sawicki0667e332018-08-24 17:26:44 +02005733 std::string desc6 = desc5;
5734 switch(strategyIndex)
5735 {
5736 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005737 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005738 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
5739 break;
5740 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005741 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005742 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
5743 break;
5744 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005745 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005746 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
5747 break;
5748 default:
5749 assert(0);
5750 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01005751
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005752 desc6 += ',';
5753 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02005754
5755 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02005756
5757 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
5758 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005759 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02005760
5761 Result result{};
5762 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005763 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02005764 if(file)
5765 {
5766 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
5767 }
Adam Sawicki0667e332018-08-24 17:26:44 +02005768 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01005769 }
5770 }
5771 }
5772 }
5773 }
5774 }
5775}
5776
5777static void PerformPoolTests(FILE* file)
5778{
5779 const size_t AVG_RESOURCES_PER_POOL = 300;
5780
5781 uint32_t repeatCount = 1;
5782 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
5783
5784 PoolTestConfig config{};
5785 config.RandSeed = 2346343;
5786 config.FrameCount = 200;
5787 config.ItemsToMakeUnusedPercent = 2;
5788
5789 size_t threadCountCount = 1;
5790 switch(ConfigType)
5791 {
5792 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
5793 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
5794 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
5795 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
5796 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
5797 default: assert(0);
5798 }
5799 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
5800 {
5801 std::string desc1;
5802
5803 switch(threadCountIndex)
5804 {
5805 case 0:
5806 desc1 += "1_thread";
5807 config.ThreadCount = 1;
5808 break;
5809 case 1:
5810 desc1 += "16_threads";
5811 config.ThreadCount = 16;
5812 break;
5813 case 2:
5814 desc1 += "2_threads";
5815 config.ThreadCount = 2;
5816 break;
5817 default:
5818 assert(0);
5819 }
5820
5821 // 0 = buffers, 1 = images, 2 = buffers and images
5822 size_t buffersVsImagesCount = 2;
5823 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
5824 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
5825 {
5826 std::string desc2 = desc1;
5827 switch(buffersVsImagesIndex)
5828 {
5829 case 0: desc2 += " Buffers"; break;
5830 case 1: desc2 += " Images"; break;
5831 case 2: desc2 += " Buffers+Images"; break;
5832 default: assert(0);
5833 }
5834
5835 // 0 = small, 1 = large, 2 = small and large
5836 size_t smallVsLargeCount = 2;
5837 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
5838 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
5839 {
5840 std::string desc3 = desc2;
5841 switch(smallVsLargeIndex)
5842 {
5843 case 0: desc3 += " Small"; break;
5844 case 1: desc3 += " Large"; break;
5845 case 2: desc3 += " Small+Large"; break;
5846 default: assert(0);
5847 }
5848
5849 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5850 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
5851 else
5852 config.PoolSize = 4ull * 1024 * 1024;
5853
5854 // 0 = varying sizes min...max, 1 = set of constant sizes
5855 size_t constantSizesCount = 1;
5856 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
5857 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
5858 {
5859 std::string desc4 = desc3;
5860 switch(constantSizesIndex)
5861 {
5862 case 0: desc4 += " Varying_sizes"; break;
5863 case 1: desc4 += " Constant_sizes"; break;
5864 default: assert(0);
5865 }
5866
5867 config.AllocationSizes.clear();
5868 // Buffers present
5869 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5870 {
5871 // Small
5872 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5873 {
5874 // Varying size
5875 if(constantSizesIndex == 0)
5876 config.AllocationSizes.push_back({4, 16, 1024});
5877 // Constant sizes
5878 else
5879 {
5880 config.AllocationSizes.push_back({1, 16, 16});
5881 config.AllocationSizes.push_back({1, 64, 64});
5882 config.AllocationSizes.push_back({1, 256, 256});
5883 config.AllocationSizes.push_back({1, 1024, 1024});
5884 }
5885 }
5886 // Large
5887 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5888 {
5889 // Varying size
5890 if(constantSizesIndex == 0)
5891 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5892 // Constant sizes
5893 else
5894 {
5895 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5896 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5897 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5898 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5899 }
5900 }
5901 }
5902 // Images present
5903 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5904 {
5905 // Small
5906 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5907 {
5908 // Varying size
5909 if(constantSizesIndex == 0)
5910 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5911 // Constant sizes
5912 else
5913 {
5914 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5915 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5916 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5917 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5918 }
5919 }
5920 // Large
5921 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5922 {
5923 // Varying size
5924 if(constantSizesIndex == 0)
5925 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5926 // Constant sizes
5927 else
5928 {
5929 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
5930 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
5931 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
5932 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
5933 }
5934 }
5935 }
5936
5937 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
5938 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
5939
5940 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
5941 size_t subscriptionModeCount;
5942 switch(ConfigType)
5943 {
5944 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
5945 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
5946 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
5947 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
5948 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
5949 default: assert(0);
5950 }
5951 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
5952 {
5953 std::string desc5 = desc4;
5954
5955 switch(subscriptionModeIndex)
5956 {
5957 case 0:
5958 desc5 += " Subscription_66%";
5959 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
5960 break;
5961 case 1:
5962 desc5 += " Subscription_133%";
5963 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
5964 break;
5965 case 2:
5966 desc5 += " Subscription_100%";
5967 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
5968 break;
5969 case 3:
5970 desc5 += " Subscription_33%";
5971 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
5972 break;
5973 case 4:
5974 desc5 += " Subscription_166%";
5975 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
5976 break;
5977 default:
5978 assert(0);
5979 }
5980
5981 config.TotalItemCount = config.UsedItemCountMax * 5;
5982 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5983
5984 const char* testDescription = desc5.c_str();
5985
5986 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
5987 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005988 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005989
5990 PoolTestResult result{};
5991 g_MemoryAliasingWarningEnabled = false;
5992 TestPool_Benchmark(result, config);
5993 g_MemoryAliasingWarningEnabled = true;
5994 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
5995 }
5996 }
5997 }
5998 }
5999 }
6000 }
6001}
6002
Adam Sawickia83793a2018-09-03 13:40:42 +02006003static void BasicTestBuddyAllocator()
6004{
6005 wprintf(L"Basic test buddy allocator\n");
6006
6007 RandomNumberGenerator rand{76543};
6008
6009 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
6010 sampleBufCreateInfo.size = 1024; // Whatever.
6011 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6012
6013 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6014 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6015
6016 VmaPoolCreateInfo poolCreateInfo = {};
6017 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006018 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006019
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02006020 // Deliberately adding 1023 to test usable size smaller than memory block size.
6021 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02006022 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02006023 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02006024
6025 VmaPool pool = nullptr;
6026 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006027 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006028
6029 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
6030
6031 VmaAllocationCreateInfo allocCreateInfo = {};
6032 allocCreateInfo.pool = pool;
6033
6034 std::vector<BufferInfo> bufInfo;
6035 BufferInfo newBufInfo;
6036 VmaAllocationInfo allocInfo;
6037
6038 bufCreateInfo.size = 1024 * 256;
6039 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6040 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006041 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006042 bufInfo.push_back(newBufInfo);
6043
6044 bufCreateInfo.size = 1024 * 512;
6045 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6046 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006047 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006048 bufInfo.push_back(newBufInfo);
6049
6050 bufCreateInfo.size = 1024 * 128;
6051 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6052 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006053 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006054 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02006055
6056 // Test very small allocation, smaller than minimum node size.
6057 bufCreateInfo.size = 1;
6058 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6059 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006060 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02006061 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02006062
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006063 // Test some small allocation with alignment requirement.
6064 {
6065 VkMemoryRequirements memReq;
6066 memReq.alignment = 256;
6067 memReq.memoryTypeBits = UINT32_MAX;
6068 memReq.size = 32;
6069
6070 newBufInfo.Buffer = VK_NULL_HANDLE;
6071 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
6072 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006073 TEST(res == VK_SUCCESS);
6074 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006075 bufInfo.push_back(newBufInfo);
6076 }
6077
6078 //SaveAllocatorStatsToFile(L"TEST.json");
6079
Adam Sawicki21017c62018-09-07 15:26:59 +02006080 VmaPoolStats stats = {};
6081 vmaGetPoolStats(g_hAllocator, pool, &stats);
6082 int DBG = 0; // Set breakpoint here to inspect `stats`.
6083
Adam Sawicki80927152018-09-07 17:27:23 +02006084 // Allocate enough new buffers to surely fall into second block.
6085 for(uint32_t i = 0; i < 32; ++i)
6086 {
6087 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
6088 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6089 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006090 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02006091 bufInfo.push_back(newBufInfo);
6092 }
6093
6094 SaveAllocatorStatsToFile(L"BuddyTest01.json");
6095
Adam Sawickia83793a2018-09-03 13:40:42 +02006096 // Destroy the buffers in random order.
6097 while(!bufInfo.empty())
6098 {
6099 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
6100 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
6101 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
6102 bufInfo.erase(bufInfo.begin() + indexToDestroy);
6103 }
6104
6105 vmaDestroyPool(g_hAllocator, pool);
6106}
6107
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006108static void BasicTestAllocatePages()
6109{
6110 wprintf(L"Basic test allocate pages\n");
6111
6112 RandomNumberGenerator rand{765461};
6113
6114 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
6115 sampleBufCreateInfo.size = 1024; // Whatever.
6116 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
6117
6118 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6119 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6120
6121 VmaPoolCreateInfo poolCreateInfo = {};
6122 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02006123 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006124
6125 // 1 block of 1 MB.
6126 poolCreateInfo.blockSize = 1024 * 1024;
6127 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
6128
6129 // Create pool.
6130 VmaPool pool = nullptr;
6131 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02006132 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006133
6134 // Make 100 allocations of 4 KB - they should fit into the pool.
6135 VkMemoryRequirements memReq;
6136 memReq.memoryTypeBits = UINT32_MAX;
6137 memReq.alignment = 4 * 1024;
6138 memReq.size = 4 * 1024;
6139
6140 VmaAllocationCreateInfo allocCreateInfo = {};
6141 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
6142 allocCreateInfo.pool = pool;
6143
6144 constexpr uint32_t allocCount = 100;
6145
6146 std::vector<VmaAllocation> alloc{allocCount};
6147 std::vector<VmaAllocationInfo> allocInfo{allocCount};
6148 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006149 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006150 for(uint32_t i = 0; i < allocCount; ++i)
6151 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006152 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006153 allocInfo[i].pMappedData != nullptr &&
6154 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
6155 allocInfo[i].memoryType == allocInfo[0].memoryType);
6156 }
6157
6158 // Free the allocations.
6159 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6160 std::fill(alloc.begin(), alloc.end(), nullptr);
6161 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6162
6163 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
6164 // Also test optional allocationInfo = null.
6165 memReq.size = 100 * 1024;
6166 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02006167 TEST(res != VK_SUCCESS);
6168 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006169
6170 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
6171 memReq.size = 4 * 1024;
6172 memReq.alignment = 128 * 1024;
6173 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006174 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006175
6176 // Make 100 dedicated allocations of 4 KB.
6177 memReq.alignment = 4 * 1024;
6178 memReq.size = 4 * 1024;
6179
6180 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
6181 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6182 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
6183 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006184 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006185 for(uint32_t i = 0; i < allocCount; ++i)
6186 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006187 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006188 allocInfo[i].pMappedData != nullptr &&
6189 allocInfo[i].memoryType == allocInfo[0].memoryType &&
6190 allocInfo[i].offset == 0);
6191 if(i > 0)
6192 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006193 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006194 }
6195 }
6196
6197 // Free the allocations.
6198 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6199 std::fill(alloc.begin(), alloc.end(), nullptr);
6200 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6201
6202 vmaDestroyPool(g_hAllocator, pool);
6203}
6204
Adam Sawickif2975342018-10-16 13:49:02 +02006205// Test the testing environment.
6206static void TestGpuData()
6207{
6208 RandomNumberGenerator rand = { 53434 };
6209
6210 std::vector<AllocInfo> allocInfo;
6211
6212 for(size_t i = 0; i < 100; ++i)
6213 {
6214 AllocInfo info = {};
6215
6216 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
6217 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
6218 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
6219 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6220 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
6221
6222 VmaAllocationCreateInfo allocCreateInfo = {};
6223 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6224
6225 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
6226 TEST(res == VK_SUCCESS);
6227
6228 info.m_StartValue = rand.Generate();
6229
6230 allocInfo.push_back(std::move(info));
6231 }
6232
6233 UploadGpuData(allocInfo.data(), allocInfo.size());
6234
6235 ValidateGpuData(allocInfo.data(), allocInfo.size());
6236
6237 DestroyAllAllocations(allocInfo);
6238}
6239
Adam Sawickib8333fb2018-03-13 16:15:53 +01006240void Test()
6241{
6242 wprintf(L"TESTING:\n");
6243
Adam Sawicki48b8a332019-11-02 15:24:33 +01006244 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02006245 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01006246 ////////////////////////////////////////////////////////////////////////////////
6247 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02006248 return;
6249 }
6250
Adam Sawickib8333fb2018-03-13 16:15:53 +01006251 // # Simple tests
6252
6253 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02006254 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02006255#if VMA_DEBUG_MARGIN
6256 TestDebugMargin();
6257#else
6258 TestPool_SameSize();
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01006259 TestPool_MinBlockCount();
Adam Sawicki212a4a62018-06-14 15:44:45 +02006260 TestHeapSizeLimit();
6261#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02006262#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
6263 TestAllocationsInitialization();
6264#endif
Adam Sawickiefa88c42019-11-18 16:33:56 +01006265 TestMemoryUsage();
Adam Sawicki50882502020-02-07 16:51:31 +01006266 TestDeviceCoherentMemory();
Adam Sawicki40ffe982019-10-11 15:56:02 +02006267 TestBudget();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006268 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02006269 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006270 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02006271 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02006272 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02006273 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006274
Adam Sawicki4338f662018-09-07 14:12:37 +02006275 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006276 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02006277
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006278 {
6279 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02006280 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006281 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02006282 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006283 fclose(file);
6284 }
6285
Adam Sawickib8333fb2018-03-13 16:15:53 +01006286 TestDefragmentationSimple();
6287 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01006288 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01006289 TestDefragmentationGpu();
Adam Sawickia52012d2019-12-23 15:28:51 +01006290 TestDefragmentationIncrementalBasic();
6291 TestDefragmentationIncrementalComplex();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006292
6293 // # Detailed tests
6294 FILE* file;
6295 fopen_s(&file, "Results.csv", "w");
6296 assert(file != NULL);
6297
6298 WriteMainTestResultHeader(file);
6299 PerformMainTests(file);
6300 //PerformCustomMainTest(file);
6301
6302 WritePoolTestResultHeader(file);
6303 PerformPoolTests(file);
6304 //PerformCustomPoolTest(file);
6305
6306 fclose(file);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01006307
Adam Sawickib8333fb2018-03-13 16:15:53 +01006308 wprintf(L"Done.\n");
6309}
6310
Adam Sawickif1a793c2018-03-13 15:42:22 +01006311#endif // #ifdef _WIN32