blob: 1890326da9d5cf810a6959b20940f1855e1c0533 [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
Adam Sawicki50882502020-02-07 16:51:31 +01002// Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
Adam Sawickiae5c4662019-01-02 10:23:35 +01003//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawicki94ce3d72019-04-17 14:59:25 +020029#include <functional>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
31#ifdef _WIN32
32
// Short label for this code — usage not visible in this chunk; TODO confirm where it is consumed.
static const char* CODE_DESCRIPTION = "Foo";

// One-shot command buffer helpers, declared here and defined elsewhere in the test harness.
extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

// Mirror of the library's debug-margin setting; defaults to 0 (no margin) when not overridden.
#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif
43
Adam Sawicki0a607132018-08-24 11:18:41 +020044enum CONFIG_TYPE {
45 CONFIG_TYPE_MINIMUM,
46 CONFIG_TYPE_SMALL,
47 CONFIG_TYPE_AVERAGE,
48 CONFIG_TYPE_LARGE,
49 CONFIG_TYPE_MAXIMUM,
50 CONFIG_TYPE_COUNT
51};
52
// Active test size for this build; switch to the LARGE line for longer, heavier runs.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020055
// Order in which remaining allocations are freed during a test's cleanup phase.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
57
// Printable names parallel to FREE_ORDER values (COUNT intentionally excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
63
Adam Sawicki80927152018-09-07 17:27:23 +020064// Copy of internal VmaAlgorithmToStr.
65static const char* AlgorithmToStr(uint32_t algorithm)
66{
67 switch(algorithm)
68 {
69 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
70 return "Linear";
71 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
72 return "Buddy";
73 case 0:
74 return "Default";
75 default:
76 assert(0);
77 return "";
78 }
79}
80
// One weighted class of test resources. Exactly one of the two ranges is
// expected to be nonzero: BufferSize* are byte sizes for buffers,
// ImageSize* are square 2D extents (in pixels) for images.
struct AllocationSize
{
    uint32_t Probability;   // Relative selection weight among all entries.
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};
87
// Parameters of one MainTest() run.
struct Config
{
    uint32_t RandSeed;                      // Seed for the main random number generator.
    VkDeviceSize BeginBytesToAllocate;      // Bytes allocated up-front (split evenly across threads).
    uint32_t AdditionalOperationCount;      // Random alloc/free operations after the initial phase (total).
    VkDeviceSize MaxBytesToAllocate;        // Per-run ceiling during the additional-operations phase (total).
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;    // Weighted resource size classes to draw from.
    uint32_t ThreadCount;                   // Number of worker threads; must be > 0.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;   // % chance an allocation goes to the shared list.
    FREE_ORDER FreeOrder;                   // Order used when freeing during cleanup.
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
101
// Aggregated timing and memory statistics produced by MainTest().
struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;      // used + unused bytes as reported by vmaCalculateStats.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;    // Unused-range stats from vmaCalculateStats.
};
110
// Defragmentation test entry points — presumably defined further down this file; TODO confirm.
void TestDefragmentationSimple();
void TestDefragmentationFull();
113
114struct PoolTestConfig
115{
116 uint32_t RandSeed;
117 uint32_t ThreadCount;
118 VkDeviceSize PoolSize;
119 uint32_t FrameCount;
120 uint32_t TotalItemCount;
121 // Range for number of items used in each frame.
122 uint32_t UsedItemCountMin, UsedItemCountMax;
123 // Percent of items to make unused, and possibly make some others used in each frame.
124 uint32_t ItemsToMakeUnusedPercent;
125 std::vector<AllocationSize> AllocationSizes;
126
127 VkDeviceSize CalcAvgResourceSize() const
128 {
129 uint32_t probabilitySum = 0;
130 VkDeviceSize sizeSum = 0;
131 for(size_t i = 0; i < AllocationSizes.size(); ++i)
132 {
133 const AllocationSize& allocSize = AllocationSizes[i];
134 if(allocSize.BufferSizeMax > 0)
135 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
136 else
137 {
138 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
139 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
140 }
141 probabilitySum += allocSize.Probability;
142 }
143 return sizeSum / probabilitySum;
144 }
145
146 bool UsesBuffers() const
147 {
148 for(size_t i = 0; i < AllocationSizes.size(); ++i)
149 if(AllocationSizes[i].BufferSizeMax > 0)
150 return true;
151 return false;
152 }
153
154 bool UsesImages() const
155 {
156 for(size_t i = 0; i < AllocationSizes.size(); ++i)
157 if(AllocationSizes[i].ImageSizeMax > 0)
158 return true;
159 return false;
160 }
161};
162
// Aggregated results of a custom-pool test run.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
171
// Bytes-per-pixel assumed when estimating an image's memory footprint in size heuristics.
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame counter shared by the tests — incremented elsewhere in this file; TODO confirm usage.
uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200175
// Pairs a Vulkan buffer with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
181
Adam Sawicki40ffe982019-10-11 15:56:02 +0200182static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
183{
184 const VkPhysicalDeviceMemoryProperties* props;
185 vmaGetMemoryProperties(g_hAllocator, &props);
186 return props->memoryTypes[memoryTypeIndex].heapIndex;
187}
188
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200189static uint32_t GetAllocationStrategyCount()
190{
191 uint32_t strategyCount = 0;
192 switch(ConfigType)
193 {
194 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
195 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
196 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
197 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
198 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
199 default: assert(0);
200 }
201 return strategyCount;
202}
203
204static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
205{
206 switch(allocStrategy)
207 {
208 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
209 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
210 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
211 case 0: return "Default"; break;
212 default: assert(0); return "";
213 }
214}
215
Adam Sawickib8333fb2018-03-13 16:15:53 +0100216static void InitResult(Result& outResult)
217{
218 outResult.TotalTime = duration::zero();
219 outResult.AllocationTimeMin = duration::max();
220 outResult.AllocationTimeAvg = duration::zero();
221 outResult.AllocationTimeMax = duration::min();
222 outResult.DeallocationTimeMin = duration::max();
223 outResult.DeallocationTimeAvg = duration::zero();
224 outResult.DeallocationTimeMax = duration::min();
225 outResult.TotalMemoryAllocated = 0;
226 outResult.FreeRangeSizeAvg = 0;
227 outResult.FreeRangeSizeMax = 0;
228}
229
230class TimeRegisterObj
231{
232public:
233 TimeRegisterObj(duration& min, duration& sum, duration& max) :
234 m_Min(min),
235 m_Sum(sum),
236 m_Max(max),
237 m_TimeBeg(std::chrono::high_resolution_clock::now())
238 {
239 }
240
241 ~TimeRegisterObj()
242 {
243 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
244 m_Sum += d;
245 if(d < m_Min) m_Min = d;
246 if(d > m_Max) m_Max = d;
247 }
248
249private:
250 duration& m_Min;
251 duration& m_Sum;
252 duration& m_Max;
253 time_point m_TimeBeg;
254};
255
// Per-thread statistics from a pool test; sums (not averages) are kept here
// and aggregated by the caller after the threads join.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
264
// RAII timer bound to a Result's allocation-time aggregates.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
273
// RAII timer bound to a Result's deallocation-time aggregates.
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
282
// RAII timer bound to a PoolTestThreadResult's allocation-time aggregates.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
291
// RAII timer bound to a PoolTestThreadResult's deallocation-time aggregates.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
300
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200301static void CurrentTimeToStr(std::string& out)
302{
303 time_t rawTime; time(&rawTime);
304 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
305 char timeStr[128];
306 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
307 out = timeStr;
308}
309
// Main multi-threaded stress test of the allocator.
//
// Spawns config.ThreadCount worker threads. Each thread:
//   1. Allocates random buffers/images up to its share of BeginBytesToAllocate.
//   2. Performs its share of AdditionalOperationCount random alloc/free
//      operations, staying under its share of MaxBytesToAllocate. Some
//      allocations go into a shared (mutex-protected) "common" list.
//   3. Signals readiness, then waits on a Win32 event; once the main thread
//      has captured memory statistics, all threads free their remaining
//      allocations in config.FreeOrder.
// Timing of every allocation/deallocation is folded into outResult via the
// RAII TimeRegisterObj helpers.
//
// outResult: filled with aggregated timings and memory statistics.
// config:    test parameters; ThreadCount must be > 0.
// Returns the last VkResult produced by a create call (VK_SUCCESS on a clean run).
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    // Probabilities are cumulative-weight tables: a random value in
    // [0, sum) is walked down the table to pick an index.
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between all threads, guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent nonzero),
    // with memory usage drawn from config.MemUsageProbability, and records it
    // either in the shared list or the caller's (per-thread) list.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer / image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted pick of VMA_MEMORY_USAGE_* (index offset from GPU_ONLY).
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                // Time only the create call itself.
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // CPU-visible usages get linear tiling so the data is addressable.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Usage flags vary with memory usage (and randomly for GPU_ONLY)
            // to diversify the driver's memory-type requirements.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            // Occasionally publish the allocation to the shared list so other
            // threads may free it later.
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            // Any creation failure fails the whole test.
            TEST(0);
        }
        return res;
    };

    // Draws a random buffer byte size OR image extent (never both) from the
    // weighted config.AllocationSizes table.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        // Weighted pick of a size class.
        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                // Round down to a multiple of 16 bytes.
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    // Rendezvous: workers bump the counter when done allocating, then block on
    // this manual-reset event until the main thread has sampled statistics.
    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global byte/operation budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                // Only allocate while under this thread's byte budget.
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation, either from the shared list
                // (under the mutex) or from this thread's own list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Note: the size check uses this thread's counter even
                        // though another thread may have made the allocation.
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Block until the main thread has sampled the memory statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared by all worker threads here
                // without synchronization — if RandomNumberGenerator is not
                // thread-safe this is a data race; threadRand would be the
                // safer choice. TODO confirm.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    // Launch workers, each with a distinct derived seed.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // The *TimeAvg fields accumulated sums; convert them to averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
676
Adam Sawicki51fa9662018-10-03 13:44:29 +0200677void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100678{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100679 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100680 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200681 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100682 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200683 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100684}
685
// One test resource: either a buffer or an image (never both), together with
// its VMA allocation and the create-info needed to validate/recreate it.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    VkImageLayout m_ImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    uint32_t m_StartValue = 0;  // First value of the incrementing data pattern written to the resource.
    // Only one member is meaningful, matching whichever of m_Buffer/m_Image is set.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // After defragmentation.
    VkBuffer m_NewBuffer = VK_NULL_HANDLE;
    VkImage m_NewImage = VK_NULL_HANDLE;

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void CreateImage(
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VkImageLayout layout);
    void Destroy();
};
712
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200713void AllocInfo::CreateBuffer(
714 const VkBufferCreateInfo& bufCreateInfo,
715 const VmaAllocationCreateInfo& allocCreateInfo)
716{
717 m_BufferInfo = bufCreateInfo;
718 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
719 TEST(res == VK_SUCCESS);
720}
Adam Sawickia52012d2019-12-23 15:28:51 +0100721void AllocInfo::CreateImage(
722 const VkImageCreateInfo& imageCreateInfo,
723 const VmaAllocationCreateInfo& allocCreateInfo,
724 VkImageLayout layout)
725{
726 m_ImageInfo = imageCreateInfo;
727 m_ImageLayout = layout;
728 VkResult res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &m_Image, &m_Allocation, nullptr);
729 TEST(res == VK_SUCCESS);
730}
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200731
// Destroys the owned Vulkan resource and then frees its memory allocation.
// Each handle is checked and nulled afterwards, so the call is idempotent
// and safe on a partially-initialized object.
void AllocInfo::Destroy()
{
    if(m_Image)
    {
        // An AllocInfo holds either an image or a buffer, never both.
        assert(!m_Buffer);
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
        m_Image = VK_NULL_HANDLE;
    }
    if(m_Buffer)
    {
        assert(!m_Image);
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
        m_Buffer = VK_NULL_HANDLE;
    }
    if(m_Allocation)
    {
        // Memory is released last, after the resource bound to it is gone.
        vmaFreeMemory(g_hAllocator, m_Allocation);
        m_Allocation = VK_NULL_HANDLE;
    }
}
752
// Pool of persistently-mapped staging buffers used for uploading/downloading
// test data. Buffers are reused across operations; the combined size of all
// buffers (used and unused) is capped at MAX_TOTAL_SIZE.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    // Bookkeeping record for one staging buffer.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;  // Persistent mapping from VMA_ALLOCATION_CREATE_MAPPED_BIT.
        bool Used = false;          // True while handed out via AcquireBuffer.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
776
777StagingBufferCollection::~StagingBufferCollection()
778{
779 for(size_t i = m_Bufs.size(); i--; )
780 {
781 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
782 }
783}
784
785bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
786{
787 assert(size <= MAX_TOTAL_SIZE);
788
789 // Try to find existing unused buffer with best size.
790 size_t bestIndex = SIZE_MAX;
791 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
792 {
793 BufInfo& currBufInfo = m_Bufs[i];
794 if(!currBufInfo.Used && currBufInfo.Size >= size &&
795 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
796 {
797 bestIndex = i;
798 }
799 }
800
801 if(bestIndex != SIZE_MAX)
802 {
803 m_Bufs[bestIndex].Used = true;
804 outBuffer = m_Bufs[bestIndex].Buffer;
805 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
806 return true;
807 }
808
809 // Allocate new buffer with requested size.
810 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
811 {
812 BufInfo bufInfo;
813 bufInfo.Size = size;
814 bufInfo.Used = true;
815
816 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
817 bufCreateInfo.size = size;
818 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
819
820 VmaAllocationCreateInfo allocCreateInfo = {};
821 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
822 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
823
824 VmaAllocationInfo allocInfo;
825 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
826 bufInfo.MappedPtr = allocInfo.pMappedData;
827 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
828
829 outBuffer = bufInfo.Buffer;
830 outMappedPtr = bufInfo.MappedPtr;
831
832 m_Bufs.push_back(std::move(bufInfo));
833
834 m_TotalSize += size;
835
836 return true;
837 }
838
839 // There are some unused but smaller buffers: Free them and try again.
840 bool hasUnused = false;
841 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
842 {
843 if(!m_Bufs[i].Used)
844 {
845 hasUnused = true;
846 break;
847 }
848 }
849 if(hasUnused)
850 {
851 for(size_t i = m_Bufs.size(); i--; )
852 {
853 if(!m_Bufs[i].Used)
854 {
855 m_TotalSize -= m_Bufs[i].Size;
856 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
857 m_Bufs.erase(m_Bufs.begin() + i);
858 }
859 }
860
861 return AcquireBuffer(size, outBuffer, outMappedPtr);
862 }
863
864 return false;
865}
866
867void StagingBufferCollection::ReleaseAllBuffers()
868{
869 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
870 {
871 m_Bufs[i].Used = false;
872 }
873}
874
// Uploads initial content for the given allocations to GPU memory using transfer
// commands batched into as few single-time command buffers as possible.
// - Buffers: filled with consecutive uint32_t values starting at m_StartValue.
// - Images: only VK_FORMAT_R8G8B8A8_UNORM with a single mip level are supported;
//   filled the same way, then transitioned to the allocation's m_ImageLayout.
// Staging memory comes from a shared StagingBufferCollection; when it runs out,
// pending copies are flushed and the staging buffers are recycled.
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    // True while a single-time command buffer is open and has pending copies.
    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Out of staging buffers: submit and finish pending copies, recycle
                // all staging buffers, then retry. Retry must succeed on an empty
                // collection.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer with consecutive values starting at m_StartValue.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(currAllocInfo.m_ImageInfo.format == VK_FORMAT_R8G8B8A8_UNORM && "Only RGBA8 images are currently supported.");
            TEST(currAllocInfo.m_ImageInfo.mipLevels == 1 && "Only single mip images are currently supported.");

            // 4 bytes per RGBA8 texel.
            const VkDeviceSize size = (VkDeviceSize)currAllocInfo.m_ImageInfo.extent.width * currAllocInfo.m_ImageInfo.extent.height * sizeof(uint32_t);

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Same flush-and-retry strategy as the buffer path above.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer with consecutive values starting at m_StartValue.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t *stagingValPtr = (uint32_t *)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination image.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }


            // Transfer to transfer dst layout
            VkImageSubresourceRange subresourceRange = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, VK_REMAINING_MIP_LEVELS,
                0, VK_REMAINING_ARRAY_LAYERS
            };

            VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
            barrier.srcAccessMask = 0;
            barrier.dstAccessMask = 0;
            barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.image = currAllocInfo.m_Image;
            barrier.subresourceRange = subresourceRange;

            // NOTE(review): BOTTOM_OF_PIPE -> TOP_OF_PIPE with zero access masks does
            // not order this layout transition against the following transfer write;
            // TOP_OF_PIPE -> TRANSFER with dstAccessMask = TRANSFER_WRITE would be the
            // conventional choice. Confirm whether this is intentional for the test.
            vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
                0, nullptr,
                0, nullptr,
                1, &barrier);

            // Copy image data
            VkBufferImageCopy copy = {};
            copy.bufferOffset = 0;
            copy.bufferRowLength = 0;
            copy.bufferImageHeight = 0;
            copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            copy.imageSubresource.layerCount = 1;
            copy.imageExtent = currAllocInfo.m_ImageInfo.extent;

            vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);

            // Transfer to desired layout
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.newLayout = currAllocInfo.m_ImageLayout;

            vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
                0, nullptr,
                0, nullptr,
                1, &barrier);
        }
    }

    // Flush any copies still pending in the open command buffer.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
1019
1020static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
1021{
1022 StagingBufferCollection stagingBufs;
1023
1024 bool cmdBufferStarted = false;
1025 size_t validateAllocIndexOffset = 0;
1026 std::vector<void*> validateStagingBuffers;
1027 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
1028 {
1029 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
1030 if(currAllocInfo.m_Buffer)
1031 {
1032 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
1033
1034 VkBuffer stagingBuf = VK_NULL_HANDLE;
1035 void* stagingBufMappedPtr = nullptr;
1036 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
1037 {
1038 TEST(cmdBufferStarted);
1039 EndSingleTimeCommands();
1040 cmdBufferStarted = false;
1041
1042 for(size_t validateIndex = 0;
1043 validateIndex < validateStagingBuffers.size();
1044 ++validateIndex)
1045 {
1046 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1047 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1048 TEST(validateSize % sizeof(uint32_t) == 0);
1049 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1050 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1051 bool valid = true;
1052 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1053 {
1054 if(*stagingValPtr != val)
1055 {
1056 valid = false;
1057 break;
1058 }
1059 ++stagingValPtr;
1060 ++val;
1061 }
1062 TEST(valid);
1063 }
1064
1065 stagingBufs.ReleaseAllBuffers();
1066
1067 validateAllocIndexOffset = allocInfoIndex;
1068 validateStagingBuffers.clear();
1069
1070 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
1071 TEST(ok);
1072 }
1073
1074 // Issue copy command from staging buffer to destination buffer.
1075 if(!cmdBufferStarted)
1076 {
1077 cmdBufferStarted = true;
1078 BeginSingleTimeCommands();
1079 }
1080
1081 VkBufferCopy copy = {};
1082 copy.srcOffset = 0;
1083 copy.dstOffset = 0;
1084 copy.size = size;
1085 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
1086
1087 // Sava mapped pointer for later validation.
1088 validateStagingBuffers.push_back(stagingBufMappedPtr);
1089 }
1090 else
1091 {
1092 TEST(0 && "Images not currently supported.");
1093 }
1094 }
1095
1096 if(cmdBufferStarted)
1097 {
1098 EndSingleTimeCommands();
1099
1100 for(size_t validateIndex = 0;
1101 validateIndex < validateStagingBuffers.size();
1102 ++validateIndex)
1103 {
1104 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1105 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1106 TEST(validateSize % sizeof(uint32_t) == 0);
1107 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1108 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1109 bool valid = true;
1110 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1111 {
1112 if(*stagingValPtr != val)
1113 {
1114 valid = false;
1115 break;
1116 }
1117 ++stagingValPtr;
1118 ++val;
1119 }
1120 TEST(valid);
1121 }
1122
1123 stagingBufs.ReleaseAllBuffers();
1124 }
1125}
1126
Adam Sawickib8333fb2018-03-13 16:15:53 +01001127static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1128{
1129 outMemReq = {};
1130 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1131 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1132}
1133
1134static void CreateBuffer(
1135 VmaPool pool,
1136 const VkBufferCreateInfo& bufCreateInfo,
1137 bool persistentlyMapped,
1138 AllocInfo& outAllocInfo)
1139{
1140 outAllocInfo = {};
1141 outAllocInfo.m_BufferInfo = bufCreateInfo;
1142
1143 VmaAllocationCreateInfo allocCreateInfo = {};
1144 allocCreateInfo.pool = pool;
1145 if(persistentlyMapped)
1146 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1147
1148 VmaAllocationInfo vmaAllocInfo = {};
1149 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1150
1151 // Setup StartValue and fill.
1152 {
1153 outAllocInfo.m_StartValue = (uint32_t)rand();
1154 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001155 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001156 if(!persistentlyMapped)
1157 {
1158 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1159 }
1160
1161 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001162 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001163 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1164 data[i] = value++;
1165
1166 if(!persistentlyMapped)
1167 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1168 }
1169}
1170
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001171static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001172{
1173 outAllocation.m_Allocation = nullptr;
1174 outAllocation.m_Buffer = nullptr;
1175 outAllocation.m_Image = nullptr;
1176 outAllocation.m_StartValue = (uint32_t)rand();
1177
1178 VmaAllocationCreateInfo vmaMemReq;
1179 GetMemReq(vmaMemReq);
1180
1181 VmaAllocationInfo allocInfo;
1182
1183 const bool isBuffer = true;//(rand() & 0x1) != 0;
1184 const bool isLarge = (rand() % 16) == 0;
1185 if(isBuffer)
1186 {
1187 const uint32_t bufferSize = isLarge ?
1188 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1189 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1190
1191 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1192 bufferInfo.size = bufferSize;
1193 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1194
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001195 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001196 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001197 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001198 }
1199 else
1200 {
1201 const uint32_t imageSizeX = isLarge ?
1202 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1203 rand() % 1024 + 1; // 1 ... 1024
1204 const uint32_t imageSizeY = isLarge ?
1205 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1206 rand() % 1024 + 1; // 1 ... 1024
1207
1208 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1209 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1210 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1211 imageInfo.extent.width = imageSizeX;
1212 imageInfo.extent.height = imageSizeY;
1213 imageInfo.extent.depth = 1;
1214 imageInfo.mipLevels = 1;
1215 imageInfo.arrayLayers = 1;
1216 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1217 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1218 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1219 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1220
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001221 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001222 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001223 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001224 }
1225
1226 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1227 if(allocInfo.pMappedData == nullptr)
1228 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001229 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001230 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001231 }
1232
1233 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001234 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001235 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1236 data[i] = value++;
1237
1238 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001239 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001240}
1241
1242static void DestroyAllocation(const AllocInfo& allocation)
1243{
1244 if(allocation.m_Buffer)
1245 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1246 else
1247 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1248}
1249
1250static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1251{
1252 for(size_t i = allocations.size(); i--; )
1253 DestroyAllocation(allocations[i]);
1254 allocations.clear();
1255}
1256
1257static void ValidateAllocationData(const AllocInfo& allocation)
1258{
1259 VmaAllocationInfo allocInfo;
1260 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1261
1262 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1263 if(allocInfo.pMappedData == nullptr)
1264 {
1265 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001266 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001267 }
1268
1269 uint32_t value = allocation.m_StartValue;
1270 bool ok = true;
1271 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001272 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001273 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1274 {
1275 if(data[i] != value++)
1276 {
1277 ok = false;
1278 break;
1279 }
1280 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001281 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001282
1283 if(allocInfo.pMappedData == nullptr)
1284 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1285}
1286
1287static void RecreateAllocationResource(AllocInfo& allocation)
1288{
1289 VmaAllocationInfo allocInfo;
1290 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1291
1292 if(allocation.m_Buffer)
1293 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001294 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001295
Adam Sawicki1f84f622019-07-02 13:40:01 +02001296 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001297 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001298
1299 // Just to silence validation layer warnings.
1300 VkMemoryRequirements vkMemReq;
1301 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001302 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001303
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001304 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001305 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001306 }
1307 else
1308 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001309 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001310
Adam Sawicki1f84f622019-07-02 13:40:01 +02001311 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001312 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001313
1314 // Just to silence validation layer warnings.
1315 VkMemoryRequirements vkMemReq;
1316 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1317
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001318 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001319 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001320 }
1321}
1322
1323static void Defragment(AllocInfo* allocs, size_t allocCount,
1324 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1325 VmaDefragmentationStats* defragmentationStats = nullptr)
1326{
1327 std::vector<VmaAllocation> vmaAllocs(allocCount);
1328 for(size_t i = 0; i < allocCount; ++i)
1329 vmaAllocs[i] = allocs[i].m_Allocation;
1330
1331 std::vector<VkBool32> allocChanged(allocCount);
1332
1333 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1334 defragmentationInfo, defragmentationStats) );
1335
1336 for(size_t i = 0; i < allocCount; ++i)
1337 {
1338 if(allocChanged[i])
1339 {
1340 RecreateAllocationResource(allocs[i]);
1341 }
1342 }
1343}
1344
1345static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1346{
1347 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1348 ValidateAllocationData(allocInfo);
1349 });
1350}
1351
1352void TestDefragmentationSimple()
1353{
1354 wprintf(L"Test defragmentation simple\n");
1355
1356 RandomNumberGenerator rand(667);
1357
1358 const VkDeviceSize BUF_SIZE = 0x10000;
1359 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1360
1361 const VkDeviceSize MIN_BUF_SIZE = 32;
1362 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1363 auto RandomBufSize = [&]() -> VkDeviceSize {
1364 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1365 };
1366
1367 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1368 bufCreateInfo.size = BUF_SIZE;
1369 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1370
1371 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1372 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1373
1374 uint32_t memTypeIndex = UINT32_MAX;
1375 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1376
1377 VmaPoolCreateInfo poolCreateInfo = {};
1378 poolCreateInfo.blockSize = BLOCK_SIZE;
1379 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1380
1381 VmaPool pool;
1382 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1383
Adam Sawickie1681912018-11-23 17:50:12 +01001384 // Defragmentation of empty pool.
1385 {
1386 VmaDefragmentationInfo2 defragInfo = {};
1387 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1388 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1389 defragInfo.poolCount = 1;
1390 defragInfo.pPools = &pool;
1391
1392 VmaDefragmentationStats defragStats = {};
1393 VmaDefragmentationContext defragCtx = nullptr;
1394 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1395 TEST(res >= VK_SUCCESS);
1396 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1397 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1398 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1399 }
1400
Adam Sawickib8333fb2018-03-13 16:15:53 +01001401 std::vector<AllocInfo> allocations;
1402
1403 // persistentlyMappedOption = 0 - not persistently mapped.
1404 // persistentlyMappedOption = 1 - persistently mapped.
1405 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1406 {
1407 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1408 const bool persistentlyMapped = persistentlyMappedOption != 0;
1409
1410 // # Test 1
1411 // Buffers of fixed size.
1412 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1413 // Expected result: at least 1 block freed.
1414 {
1415 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1416 {
1417 AllocInfo allocInfo;
1418 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1419 allocations.push_back(allocInfo);
1420 }
1421
1422 for(size_t i = 1; i < allocations.size(); ++i)
1423 {
1424 DestroyAllocation(allocations[i]);
1425 allocations.erase(allocations.begin() + i);
1426 }
1427
1428 VmaDefragmentationStats defragStats;
1429 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001430 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1431 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001432
1433 ValidateAllocationsData(allocations.data(), allocations.size());
1434
1435 DestroyAllAllocations(allocations);
1436 }
1437
1438 // # Test 2
1439 // Buffers of fixed size.
1440 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1441 // Expected result: Each of 4 interations makes some progress.
1442 {
1443 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1444 {
1445 AllocInfo allocInfo;
1446 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1447 allocations.push_back(allocInfo);
1448 }
1449
1450 for(size_t i = 1; i < allocations.size(); ++i)
1451 {
1452 DestroyAllocation(allocations[i]);
1453 allocations.erase(allocations.begin() + i);
1454 }
1455
1456 VmaDefragmentationInfo defragInfo = {};
1457 defragInfo.maxAllocationsToMove = 1;
1458 defragInfo.maxBytesToMove = BUF_SIZE;
1459
1460 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1461 {
1462 VmaDefragmentationStats defragStats;
1463 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001464 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001465 }
1466
1467 ValidateAllocationsData(allocations.data(), allocations.size());
1468
1469 DestroyAllAllocations(allocations);
1470 }
1471
1472 // # Test 3
1473 // Buffers of variable size.
1474 // Create a number of buffers. Remove some percent of them.
1475 // Defragment while having some percent of them unmovable.
1476 // Expected result: Just simple validation.
1477 {
1478 for(size_t i = 0; i < 100; ++i)
1479 {
1480 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1481 localBufCreateInfo.size = RandomBufSize();
1482
1483 AllocInfo allocInfo;
1484 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1485 allocations.push_back(allocInfo);
1486 }
1487
1488 const uint32_t percentToDelete = 60;
1489 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1490 for(size_t i = 0; i < numberToDelete; ++i)
1491 {
1492 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1493 DestroyAllocation(allocations[indexToDelete]);
1494 allocations.erase(allocations.begin() + indexToDelete);
1495 }
1496
1497 // Non-movable allocations will be at the beginning of allocations array.
1498 const uint32_t percentNonMovable = 20;
1499 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1500 for(size_t i = 0; i < numberNonMovable; ++i)
1501 {
1502 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1503 if(indexNonMovable != i)
1504 std::swap(allocations[i], allocations[indexNonMovable]);
1505 }
1506
1507 VmaDefragmentationStats defragStats;
1508 Defragment(
1509 allocations.data() + numberNonMovable,
1510 allocations.size() - numberNonMovable,
1511 nullptr, &defragStats);
1512
1513 ValidateAllocationsData(allocations.data(), allocations.size());
1514
1515 DestroyAllAllocations(allocations);
1516 }
1517 }
1518
Adam Sawicki647cf242018-11-23 17:58:00 +01001519 /*
1520 Allocation that must be move to an overlapping place using memmove().
1521 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1522 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001523 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001524 {
1525 AllocInfo allocInfo[2];
1526
1527 bufCreateInfo.size = BUF_SIZE;
1528 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1529 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1530 bufCreateInfo.size = biggerBufSize;
1531 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1532
1533 DestroyAllocation(allocInfo[0]);
1534
1535 VmaDefragmentationStats defragStats;
1536 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1537 // If this fails, it means we couldn't do memmove with overlapping regions.
1538 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1539
1540 ValidateAllocationsData(&allocInfo[1], 1);
1541 DestroyAllocation(allocInfo[1]);
1542 }
1543
Adam Sawickib8333fb2018-03-13 16:15:53 +01001544 vmaDestroyPool(g_hAllocator, pool);
1545}
1546
Adam Sawicki52076eb2018-11-22 16:14:50 +01001547void TestDefragmentationWholePool()
1548{
1549 wprintf(L"Test defragmentation whole pool\n");
1550
1551 RandomNumberGenerator rand(668);
1552
1553 const VkDeviceSize BUF_SIZE = 0x10000;
1554 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1555
1556 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1557 bufCreateInfo.size = BUF_SIZE;
1558 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1559
1560 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1561 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1562
1563 uint32_t memTypeIndex = UINT32_MAX;
1564 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1565
1566 VmaPoolCreateInfo poolCreateInfo = {};
1567 poolCreateInfo.blockSize = BLOCK_SIZE;
1568 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1569
1570 VmaDefragmentationStats defragStats[2];
1571 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1572 {
1573 VmaPool pool;
1574 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1575
1576 std::vector<AllocInfo> allocations;
1577
1578 // Buffers of fixed size.
1579 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1580 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1581 {
1582 AllocInfo allocInfo;
1583 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1584 allocations.push_back(allocInfo);
1585 }
1586
1587 for(size_t i = 1; i < allocations.size(); ++i)
1588 {
1589 DestroyAllocation(allocations[i]);
1590 allocations.erase(allocations.begin() + i);
1591 }
1592
1593 VmaDefragmentationInfo2 defragInfo = {};
1594 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1595 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1596 std::vector<VmaAllocation> allocationsToDefrag;
1597 if(caseIndex == 0)
1598 {
1599 defragInfo.poolCount = 1;
1600 defragInfo.pPools = &pool;
1601 }
1602 else
1603 {
1604 const size_t allocCount = allocations.size();
1605 allocationsToDefrag.resize(allocCount);
1606 std::transform(
1607 allocations.begin(), allocations.end(),
1608 allocationsToDefrag.begin(),
1609 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1610 defragInfo.allocationCount = (uint32_t)allocCount;
1611 defragInfo.pAllocations = allocationsToDefrag.data();
1612 }
1613
1614 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1615 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1616 TEST(res >= VK_SUCCESS);
1617 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1618
1619 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1620
1621 ValidateAllocationsData(allocations.data(), allocations.size());
1622
1623 DestroyAllAllocations(allocations);
1624
1625 vmaDestroyPool(g_hAllocator, pool);
1626 }
1627
1628 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1629 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1630 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1631 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1632}
1633
Adam Sawickib8333fb2018-03-13 16:15:53 +01001634void TestDefragmentationFull()
1635{
1636 std::vector<AllocInfo> allocations;
1637
1638 // Create initial allocations.
1639 for(size_t i = 0; i < 400; ++i)
1640 {
1641 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001642 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001643 allocations.push_back(allocation);
1644 }
1645
1646 // Delete random allocations
1647 const size_t allocationsToDeletePercent = 80;
1648 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1649 for(size_t i = 0; i < allocationsToDelete; ++i)
1650 {
1651 size_t index = (size_t)rand() % allocations.size();
1652 DestroyAllocation(allocations[index]);
1653 allocations.erase(allocations.begin() + index);
1654 }
1655
1656 for(size_t i = 0; i < allocations.size(); ++i)
1657 ValidateAllocationData(allocations[i]);
1658
Adam Sawicki0667e332018-08-24 17:26:44 +02001659 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001660
1661 {
1662 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1663 for(size_t i = 0; i < allocations.size(); ++i)
1664 vmaAllocations[i] = allocations[i].m_Allocation;
1665
1666 const size_t nonMovablePercent = 0;
1667 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1668 for(size_t i = 0; i < nonMovableCount; ++i)
1669 {
1670 size_t index = (size_t)rand() % vmaAllocations.size();
1671 vmaAllocations.erase(vmaAllocations.begin() + index);
1672 }
1673
1674 const uint32_t defragCount = 1;
1675 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1676 {
1677 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1678
1679 VmaDefragmentationInfo defragmentationInfo;
1680 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1681 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1682
1683 wprintf(L"Defragmentation #%u\n", defragIndex);
1684
1685 time_point begTime = std::chrono::high_resolution_clock::now();
1686
1687 VmaDefragmentationStats stats;
1688 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001689 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001690
1691 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1692
1693 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1694 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1695 wprintf(L"Time: %.2f s\n", defragmentDuration);
1696
1697 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1698 {
1699 if(allocationsChanged[i])
1700 {
1701 RecreateAllocationResource(allocations[i]);
1702 }
1703 }
1704
1705 for(size_t i = 0; i < allocations.size(); ++i)
1706 ValidateAllocationData(allocations[i]);
1707
Adam Sawicki0667e332018-08-24 17:26:44 +02001708 //wchar_t fileName[MAX_PATH];
1709 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1710 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001711 }
1712 }
1713
1714 // Destroy all remaining allocations.
1715 DestroyAllAllocations(allocations);
1716}
1717
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001718static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001719{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001720 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001721 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001722
1723 std::vector<AllocInfo> allocations;
1724
1725 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001726 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1727 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001728 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001729 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1730 const size_t percentToLeave = 30;
1731 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001732 RandomNumberGenerator rand = { 234522 };
1733
1734 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001735
1736 VmaAllocationCreateInfo allocCreateInfo = {};
1737 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001738 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001739
1740 // Create all intended buffers.
1741 for(size_t i = 0; i < bufCount; ++i)
1742 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001743 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1744
1745 if(rand.Generate() % 100 < percentNonMovable)
1746 {
1747 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1748 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1749 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1750 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1751 }
1752 else
1753 {
1754 // Different usage just to see different color in output from VmaDumpVis.
1755 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1756 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1757 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1758 // And in JSON dump.
1759 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1760 }
1761
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001762 AllocInfo alloc;
1763 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1764 alloc.m_StartValue = rand.Generate();
1765 allocations.push_back(alloc);
1766 }
1767
1768 // Destroy some percentage of them.
1769 {
1770 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1771 for(size_t i = 0; i < buffersToDestroy; ++i)
1772 {
1773 const size_t index = rand.Generate() % allocations.size();
1774 allocations[index].Destroy();
1775 allocations.erase(allocations.begin() + index);
1776 }
1777 }
1778
1779 // Fill them with meaningful data.
1780 UploadGpuData(allocations.data(), allocations.size());
1781
Adam Sawickic6ede152018-11-16 17:04:14 +01001782 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001783 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001784 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001785
1786 // Defragment using GPU only.
1787 {
1788 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001789
Adam Sawickic6ede152018-11-16 17:04:14 +01001790 std::vector<VmaAllocation> allocationPtrs;
1791 std::vector<VkBool32> allocationChanged;
1792 std::vector<size_t> allocationOriginalIndex;
1793
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001794 for(size_t i = 0; i < allocCount; ++i)
1795 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001796 VmaAllocationInfo allocInfo = {};
1797 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1798 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1799 {
1800 allocationPtrs.push_back(allocations[i].m_Allocation);
1801 allocationChanged.push_back(VK_FALSE);
1802 allocationOriginalIndex.push_back(i);
1803 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001804 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001805
1806 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001807
1808 BeginSingleTimeCommands();
1809
1810 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001811 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001812 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001813 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001814 defragInfo.pAllocationsChanged = allocationChanged.data();
1815 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001816 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1817 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1818
1819 VmaDefragmentationStats stats = {};
1820 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1821 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1822 TEST(res >= VK_SUCCESS);
1823
1824 EndSingleTimeCommands();
1825
1826 vmaDefragmentationEnd(g_hAllocator, ctx);
1827
Adam Sawickic6ede152018-11-16 17:04:14 +01001828 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001829 {
1830 if(allocationChanged[i])
1831 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001832 const size_t origAllocIndex = allocationOriginalIndex[i];
1833 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001834 }
1835 }
1836
Adam Sawicki4d844e22019-01-24 16:21:05 +01001837 // If corruption detection is enabled, GPU defragmentation may not work on
1838 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001839 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001840 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1841 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001842 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001843 }
1844
1845 ValidateGpuData(allocations.data(), allocations.size());
1846
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001847 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001848 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001849
1850 // Destroy all remaining buffers.
1851 for(size_t i = allocations.size(); i--; )
1852 {
1853 allocations[i].Destroy();
1854 }
Adam Sawicki05704002018-11-08 16:07:29 +01001855
1856 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001857}
1858
Adam Sawickic467e282019-12-23 16:38:31 +01001859static void ProcessDefragmentationStepInfo(VmaDefragmentationPassInfo &stepInfo)
Adam Sawickia52012d2019-12-23 15:28:51 +01001860{
1861 std::vector<VkImageMemoryBarrier> beginImageBarriers;
1862 std::vector<VkImageMemoryBarrier> finalizeImageBarriers;
1863
1864 VkPipelineStageFlags beginSrcStageMask = 0;
1865 VkPipelineStageFlags beginDstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
1866
1867 VkPipelineStageFlags finalizeSrcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
1868 VkPipelineStageFlags finalizeDstStageMask = 0;
1869
1870 bool wantsMemoryBarrier = false;
1871
1872 VkMemoryBarrier beginMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
1873 VkMemoryBarrier finalizeMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
1874
Adam Sawickic467e282019-12-23 16:38:31 +01001875 for(uint32_t i = 0; i < stepInfo.moveCount; ++i)
Adam Sawickia52012d2019-12-23 15:28:51 +01001876 {
1877 VmaAllocationInfo info;
1878 vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);
1879
1880 AllocInfo *allocInfo = (AllocInfo *)info.pUserData;
1881
1882 if(allocInfo->m_Image)
1883 {
1884 VkImage newImage;
1885
1886 const VkResult result = vkCreateImage(g_hDevice, &allocInfo->m_ImageInfo, g_Allocs, &newImage);
1887 TEST(result >= VK_SUCCESS);
1888
1889 vkBindImageMemory(g_hDevice, newImage, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
Adam Sawickic467e282019-12-23 16:38:31 +01001890 allocInfo->m_NewImage = newImage;
Adam Sawickia52012d2019-12-23 15:28:51 +01001891
1892 // Keep track of our pipeline stages that we need to wait/signal on
1893 beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1894 finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1895
1896 // We need one pipeline barrier and two image layout transitions here
1897 // First we'll have to turn our newly created image into VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
1898 // And the second one is turning the old image into VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
1899
1900 VkImageSubresourceRange subresourceRange = {
1901 VK_IMAGE_ASPECT_COLOR_BIT,
1902 0, VK_REMAINING_MIP_LEVELS,
1903 0, VK_REMAINING_ARRAY_LAYERS
1904 };
1905
1906 VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
1907 barrier.srcAccessMask = 0;
1908 barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1909 barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
1910 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1911 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1912 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1913 barrier.image = newImage;
1914 barrier.subresourceRange = subresourceRange;
1915
1916 beginImageBarriers.push_back(barrier);
1917
1918 // Second barrier to convert the existing image. This one actually needs a real barrier
1919 barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
1920 barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1921 barrier.oldLayout = allocInfo->m_ImageLayout;
1922 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
1923 barrier.image = allocInfo->m_Image;
1924
1925 beginImageBarriers.push_back(barrier);
1926
1927 // And lastly we need a barrier that turns our new image into the layout of the old one
1928 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1929 barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
1930 barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1931 barrier.newLayout = allocInfo->m_ImageLayout;
1932 barrier.image = newImage;
1933
1934 finalizeImageBarriers.push_back(barrier);
1935 }
1936 else if(allocInfo->m_Buffer)
1937 {
1938 VkBuffer newBuffer;
1939
1940 const VkResult result = vkCreateBuffer(g_hDevice, &allocInfo->m_BufferInfo, g_Allocs, &newBuffer);
1941 TEST(result >= VK_SUCCESS);
1942
1943 vkBindBufferMemory(g_hDevice, newBuffer, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
Adam Sawickic467e282019-12-23 16:38:31 +01001944 allocInfo->m_NewBuffer = newBuffer;
Adam Sawickia52012d2019-12-23 15:28:51 +01001945
1946 // Keep track of our pipeline stages that we need to wait/signal on
1947 beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1948 finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
1949
1950 beginMemoryBarrier.srcAccessMask |= VK_ACCESS_MEMORY_WRITE_BIT;
1951 beginMemoryBarrier.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;
1952
1953 finalizeMemoryBarrier.srcAccessMask |= VK_ACCESS_TRANSFER_WRITE_BIT;
1954 finalizeMemoryBarrier.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT;
1955
1956 wantsMemoryBarrier = true;
1957 }
1958 }
1959
1960 if(!beginImageBarriers.empty() || wantsMemoryBarrier)
1961 {
1962 const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;
1963
1964 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, beginSrcStageMask, beginDstStageMask, 0,
1965 memoryBarrierCount, &beginMemoryBarrier,
1966 0, nullptr,
1967 (uint32_t)beginImageBarriers.size(), beginImageBarriers.data());
1968 }
1969
1970 for(uint32_t i = 0; i < stepInfo.moveCount; ++ i)
1971 {
1972 VmaAllocationInfo info;
1973 vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);
1974
1975 AllocInfo *allocInfo = (AllocInfo *)info.pUserData;
1976
1977 if(allocInfo->m_Image)
1978 {
1979 std::vector<VkImageCopy> imageCopies;
1980
1981 // Copy all mips of the source image into the target image
1982 VkOffset3D offset = { 0, 0, 0 };
1983 VkExtent3D extent = allocInfo->m_ImageInfo.extent;
1984
1985 VkImageSubresourceLayers subresourceLayers = {
1986 VK_IMAGE_ASPECT_COLOR_BIT,
1987 0,
1988 0, 1
1989 };
1990
1991 for(uint32_t mip = 0; mip < allocInfo->m_ImageInfo.mipLevels; ++ mip)
1992 {
1993 subresourceLayers.mipLevel = mip;
1994
1995 VkImageCopy imageCopy{
1996 subresourceLayers,
1997 offset,
1998 subresourceLayers,
1999 offset,
2000 extent
2001 };
2002
2003 imageCopies.push_back(imageCopy);
2004
2005 extent.width = std::max(uint32_t(1), extent.width >> 1);
2006 extent.height = std::max(uint32_t(1), extent.height >> 1);
2007 extent.depth = std::max(uint32_t(1), extent.depth >> 1);
2008 }
2009
2010 vkCmdCopyImage(
2011 g_hTemporaryCommandBuffer,
2012 allocInfo->m_Image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
Adam Sawickic467e282019-12-23 16:38:31 +01002013 allocInfo->m_NewImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
Adam Sawickia52012d2019-12-23 15:28:51 +01002014 (uint32_t)imageCopies.size(), imageCopies.data());
Adam Sawickia52012d2019-12-23 15:28:51 +01002015 }
2016 else if(allocInfo->m_Buffer)
2017 {
2018 VkBufferCopy region = {
2019 0,
2020 0,
2021 allocInfo->m_BufferInfo.size };
2022
2023 vkCmdCopyBuffer(g_hTemporaryCommandBuffer,
Adam Sawickic467e282019-12-23 16:38:31 +01002024 allocInfo->m_Buffer, allocInfo->m_NewBuffer,
Adam Sawickia52012d2019-12-23 15:28:51 +01002025 1, &region);
Adam Sawickia52012d2019-12-23 15:28:51 +01002026 }
2027 }
2028
Adam Sawickia52012d2019-12-23 15:28:51 +01002029 if(!finalizeImageBarriers.empty() || wantsMemoryBarrier)
2030 {
2031 const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;
2032
2033 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, finalizeSrcStageMask, finalizeDstStageMask, 0,
2034 memoryBarrierCount, &finalizeMemoryBarrier,
2035 0, nullptr,
2036 (uint32_t)finalizeImageBarriers.size(), finalizeImageBarriers.data());
2037 }
2038}
2039
2040
2041static void TestDefragmentationIncrementalBasic()
2042{
2043 wprintf(L"Test defragmentation incremental basic\n");
2044 g_MemoryAliasingWarningEnabled = false;
2045
2046 std::vector<AllocInfo> allocations;
2047
2048 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2049 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2050 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2051 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2052 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic467e282019-12-23 16:38:31 +01002053 const size_t imageCount = totalSize / ((size_t)imageSizes[0] * imageSizes[0] * 4) / 2;
Adam Sawickia52012d2019-12-23 15:28:51 +01002054 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2055 const size_t percentToLeave = 30;
2056 RandomNumberGenerator rand = { 234522 };
2057
2058 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2059 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2060 imageInfo.extent.depth = 1;
2061 imageInfo.mipLevels = 1;
2062 imageInfo.arrayLayers = 1;
2063 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2064 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2065 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2066 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2067 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2068
2069 VmaAllocationCreateInfo allocCreateInfo = {};
2070 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2071 allocCreateInfo.flags = 0;
2072
2073 // Create all intended images.
2074 for(size_t i = 0; i < imageCount; ++i)
2075 {
2076 const uint32_t size = imageSizes[rand.Generate() % 3];
2077
2078 imageInfo.extent.width = size;
2079 imageInfo.extent.height = size;
2080
2081 AllocInfo alloc;
2082 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2083 alloc.m_StartValue = 0;
2084
2085 allocations.push_back(alloc);
2086 }
2087
2088 // And all buffers
2089 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2090
2091 for(size_t i = 0; i < bufCount; ++i)
2092 {
2093 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2094 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2095
2096 AllocInfo alloc;
2097 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2098 alloc.m_StartValue = 0;
2099
2100 allocations.push_back(alloc);
2101 }
2102
2103 // Destroy some percentage of them.
2104 {
2105 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2106 for(size_t i = 0; i < allocationsToDestroy; ++i)
2107 {
2108 const size_t index = rand.Generate() % allocations.size();
2109 allocations[index].Destroy();
2110 allocations.erase(allocations.begin() + index);
2111 }
2112 }
2113
2114 {
2115 // Set our user data pointers. A real application should probably be more clever here
2116 const size_t allocationCount = allocations.size();
2117 for(size_t i = 0; i < allocationCount; ++i)
2118 {
2119 AllocInfo &alloc = allocations[i];
2120 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2121 }
2122 }
2123
2124 // Fill them with meaningful data.
2125 UploadGpuData(allocations.data(), allocations.size());
2126
2127 wchar_t fileName[MAX_PATH];
2128 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_A_before.json");
2129 SaveAllocatorStatsToFile(fileName);
2130
2131 // Defragment using GPU only.
2132 {
2133 const size_t allocCount = allocations.size();
2134
2135 std::vector<VmaAllocation> allocationPtrs;
2136
2137 for(size_t i = 0; i < allocCount; ++i)
2138 {
Adam Sawickia52012d2019-12-23 15:28:51 +01002139 allocationPtrs.push_back(allocations[i].m_Allocation);
2140 }
2141
2142 const size_t movableAllocCount = allocationPtrs.size();
2143
2144 VmaDefragmentationInfo2 defragInfo = {};
2145 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2146 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2147 defragInfo.pAllocations = allocationPtrs.data();
2148 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2149 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2150
2151 VmaDefragmentationStats stats = {};
2152 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2153 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2154 TEST(res >= VK_SUCCESS);
2155
2156 res = VK_NOT_READY;
2157
Adam Sawickic467e282019-12-23 16:38:31 +01002158 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002159 moveInfo.resize(movableAllocCount);
2160
2161 while(res == VK_NOT_READY)
2162 {
Adam Sawickic467e282019-12-23 16:38:31 +01002163 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002164 stepInfo.pMoves = moveInfo.data();
2165 stepInfo.moveCount = (uint32_t)moveInfo.size();
2166
Adam Sawickic467e282019-12-23 16:38:31 +01002167 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002168 TEST(res >= VK_SUCCESS);
2169
2170 BeginSingleTimeCommands();
Adam Sawickic467e282019-12-23 16:38:31 +01002171 std::vector<void*> newHandles;
Adam Sawickia52012d2019-12-23 15:28:51 +01002172 ProcessDefragmentationStepInfo(stepInfo);
2173 EndSingleTimeCommands();
2174
Adam Sawickic467e282019-12-23 16:38:31 +01002175 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2176
2177 // Destroy old buffers/images and replace them with new handles.
2178 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2179 {
2180 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2181 VmaAllocationInfo vmaAllocInfo;
2182 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2183 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2184 if(allocInfo->m_Buffer)
2185 {
2186 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2187 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2188 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2189 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2190 }
2191 else if(allocInfo->m_Image)
2192 {
2193 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2194 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2195 allocInfo->m_Image = allocInfo->m_NewImage;
2196 allocInfo->m_NewImage = VK_NULL_HANDLE;
2197 }
2198 else
2199 assert(0);
2200 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002201 }
2202
2203 TEST(res >= VK_SUCCESS);
2204 vmaDefragmentationEnd(g_hAllocator, ctx);
2205
2206 // If corruption detection is enabled, GPU defragmentation may not work on
2207 // memory types that have this detection active, e.g. on Intel.
2208#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2209 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2210 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2211#endif
2212 }
2213
2214 //ValidateGpuData(allocations.data(), allocations.size());
2215
2216 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_B_after.json");
2217 SaveAllocatorStatsToFile(fileName);
2218
Adam Sawickic467e282019-12-23 16:38:31 +01002219 // Destroy all remaining buffers and images.
Adam Sawickia52012d2019-12-23 15:28:51 +01002220 for(size_t i = allocations.size(); i--; )
2221 {
2222 allocations[i].Destroy();
2223 }
2224
2225 g_MemoryAliasingWarningEnabled = true;
2226}
2227
2228void TestDefragmentationIncrementalComplex()
2229{
2230 wprintf(L"Test defragmentation incremental complex\n");
2231 g_MemoryAliasingWarningEnabled = false;
2232
2233 std::vector<AllocInfo> allocations;
2234
2235 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2236 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2237 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2238 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2239 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
2240 const size_t imageCount = (size_t)(totalSize / (imageSizes[0] * imageSizes[0] * 4)) / 2;
2241 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2242 const size_t percentToLeave = 30;
2243 RandomNumberGenerator rand = { 234522 };
2244
2245 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2246 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2247 imageInfo.extent.depth = 1;
2248 imageInfo.mipLevels = 1;
2249 imageInfo.arrayLayers = 1;
2250 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2251 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2252 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2253 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2254 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2255
2256 VmaAllocationCreateInfo allocCreateInfo = {};
2257 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2258 allocCreateInfo.flags = 0;
2259
2260 // Create all intended images.
2261 for(size_t i = 0; i < imageCount; ++i)
2262 {
2263 const uint32_t size = imageSizes[rand.Generate() % 3];
2264
2265 imageInfo.extent.width = size;
2266 imageInfo.extent.height = size;
2267
2268 AllocInfo alloc;
2269 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2270 alloc.m_StartValue = 0;
2271
2272 allocations.push_back(alloc);
2273 }
2274
2275 // And all buffers
2276 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2277
2278 for(size_t i = 0; i < bufCount; ++i)
2279 {
2280 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2281 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2282
2283 AllocInfo alloc;
2284 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2285 alloc.m_StartValue = 0;
2286
2287 allocations.push_back(alloc);
2288 }
2289
2290 // Destroy some percentage of them.
2291 {
2292 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2293 for(size_t i = 0; i < allocationsToDestroy; ++i)
2294 {
2295 const size_t index = rand.Generate() % allocations.size();
2296 allocations[index].Destroy();
2297 allocations.erase(allocations.begin() + index);
2298 }
2299 }
2300
2301 {
2302 // Set our user data pointers. A real application should probably be more clever here
2303 const size_t allocationCount = allocations.size();
2304 for(size_t i = 0; i < allocationCount; ++i)
2305 {
2306 AllocInfo &alloc = allocations[i];
2307 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2308 }
2309 }
2310
2311 // Fill them with meaningful data.
2312 UploadGpuData(allocations.data(), allocations.size());
2313
2314 wchar_t fileName[MAX_PATH];
2315 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_A_before.json");
2316 SaveAllocatorStatsToFile(fileName);
2317
2318 std::vector<AllocInfo> additionalAllocations;
2319
2320#define MakeAdditionalAllocation() \
2321 do { \
2322 { \
2323 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16); \
2324 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; \
2325 \
2326 AllocInfo alloc; \
2327 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo); \
2328 \
2329 additionalAllocations.push_back(alloc); \
2330 } \
2331 } while(0)
2332
2333 // Defragment using GPU only.
2334 {
2335 const size_t allocCount = allocations.size();
2336
2337 std::vector<VmaAllocation> allocationPtrs;
2338
2339 for(size_t i = 0; i < allocCount; ++i)
2340 {
2341 VmaAllocationInfo allocInfo = {};
2342 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
2343
2344 allocationPtrs.push_back(allocations[i].m_Allocation);
2345 }
2346
2347 const size_t movableAllocCount = allocationPtrs.size();
2348
2349 VmaDefragmentationInfo2 defragInfo = {};
2350 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2351 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2352 defragInfo.pAllocations = allocationPtrs.data();
2353 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2354 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2355
2356 VmaDefragmentationStats stats = {};
2357 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2358 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2359 TEST(res >= VK_SUCCESS);
2360
2361 res = VK_NOT_READY;
2362
Adam Sawickic467e282019-12-23 16:38:31 +01002363 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002364 moveInfo.resize(movableAllocCount);
2365
2366 MakeAdditionalAllocation();
2367
2368 while(res == VK_NOT_READY)
2369 {
Adam Sawickic467e282019-12-23 16:38:31 +01002370 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002371 stepInfo.pMoves = moveInfo.data();
2372 stepInfo.moveCount = (uint32_t)moveInfo.size();
2373
Adam Sawickic467e282019-12-23 16:38:31 +01002374 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002375 TEST(res >= VK_SUCCESS);
2376
2377 MakeAdditionalAllocation();
2378
2379 BeginSingleTimeCommands();
2380 ProcessDefragmentationStepInfo(stepInfo);
2381 EndSingleTimeCommands();
2382
Adam Sawickic467e282019-12-23 16:38:31 +01002383 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2384
2385 // Destroy old buffers/images and replace them with new handles.
2386 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2387 {
2388 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2389 VmaAllocationInfo vmaAllocInfo;
2390 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2391 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2392 if(allocInfo->m_Buffer)
2393 {
2394 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2395 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2396 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2397 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2398 }
2399 else if(allocInfo->m_Image)
2400 {
2401 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2402 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2403 allocInfo->m_Image = allocInfo->m_NewImage;
2404 allocInfo->m_NewImage = VK_NULL_HANDLE;
2405 }
2406 else
2407 assert(0);
2408 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002409
2410 MakeAdditionalAllocation();
2411 }
2412
2413 TEST(res >= VK_SUCCESS);
2414 vmaDefragmentationEnd(g_hAllocator, ctx);
2415
2416 // If corruption detection is enabled, GPU defragmentation may not work on
2417 // memory types that have this detection active, e.g. on Intel.
2418#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2419 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2420 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2421#endif
2422 }
2423
2424 //ValidateGpuData(allocations.data(), allocations.size());
2425
2426 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_B_after.json");
2427 SaveAllocatorStatsToFile(fileName);
2428
2429 // Destroy all remaining buffers.
2430 for(size_t i = allocations.size(); i--; )
2431 {
2432 allocations[i].Destroy();
2433 }
2434
2435 for(size_t i = additionalAllocations.size(); i--; )
2436 {
2437 additionalAllocations[i].Destroy();
2438 }
2439
2440 g_MemoryAliasingWarningEnabled = true;
2441}
2442
2443
Adam Sawickib8333fb2018-03-13 16:15:53 +01002444static void TestUserData()
2445{
2446 VkResult res;
2447
2448 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2449 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2450 bufCreateInfo.size = 0x10000;
2451
2452 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
2453 {
2454 // Opaque pointer
2455 {
2456
2457 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
2458 void* pointerToSomething = &res;
2459
2460 VmaAllocationCreateInfo allocCreateInfo = {};
2461 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2462 allocCreateInfo.pUserData = numberAsPointer;
2463 if(testIndex == 1)
2464 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2465
2466 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2467 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002468 TEST(res == VK_SUCCESS);
2469 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002470
2471 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002472 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002473
2474 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
2475 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002476 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002477
2478 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2479 }
2480
2481 // String
2482 {
2483 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
2484 const char* name2 = "2";
2485 const size_t name1Len = strlen(name1);
2486
2487 char* name1Buf = new char[name1Len + 1];
2488 strcpy_s(name1Buf, name1Len + 1, name1);
2489
2490 VmaAllocationCreateInfo allocCreateInfo = {};
2491 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2492 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
2493 allocCreateInfo.pUserData = name1Buf;
2494 if(testIndex == 1)
2495 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2496
2497 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2498 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002499 TEST(res == VK_SUCCESS);
2500 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
2501 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002502
2503 delete[] name1Buf;
2504
2505 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002506 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002507
2508 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
2509 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002510 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002511
2512 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
2513 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002515
2516 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2517 }
2518 }
2519}
2520
Adam Sawicki370ab182018-11-08 16:31:00 +01002521static void TestInvalidAllocations()
2522{
2523 VkResult res;
2524
2525 VmaAllocationCreateInfo allocCreateInfo = {};
2526 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2527
2528 // Try to allocate 0 bytes.
2529 {
2530 VkMemoryRequirements memReq = {};
2531 memReq.size = 0; // !!!
2532 memReq.alignment = 4;
2533 memReq.memoryTypeBits = UINT32_MAX;
2534 VmaAllocation alloc = VK_NULL_HANDLE;
2535 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
2536 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
2537 }
2538
2539 // Try to create buffer with size = 0.
2540 {
2541 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2542 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2543 bufCreateInfo.size = 0; // !!!
2544 VkBuffer buf = VK_NULL_HANDLE;
2545 VmaAllocation alloc = VK_NULL_HANDLE;
2546 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
2547 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2548 }
2549
2550 // Try to create image with one dimension = 0.
2551 {
2552 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2553 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
2554 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
2555 imageCreateInfo.extent.width = 128;
2556 imageCreateInfo.extent.height = 0; // !!!
2557 imageCreateInfo.extent.depth = 1;
2558 imageCreateInfo.mipLevels = 1;
2559 imageCreateInfo.arrayLayers = 1;
2560 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2561 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
2562 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2563 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2564 VkImage image = VK_NULL_HANDLE;
2565 VmaAllocation alloc = VK_NULL_HANDLE;
2566 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
2567 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2568 }
2569}
2570
Adam Sawickib8333fb2018-03-13 16:15:53 +01002571static void TestMemoryRequirements()
2572{
2573 VkResult res;
2574 VkBuffer buf;
2575 VmaAllocation alloc;
2576 VmaAllocationInfo allocInfo;
2577
2578 const VkPhysicalDeviceMemoryProperties* memProps;
2579 vmaGetMemoryProperties(g_hAllocator, &memProps);
2580
2581 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2582 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2583 bufInfo.size = 128;
2584
2585 VmaAllocationCreateInfo allocCreateInfo = {};
2586
2587 // No requirements.
2588 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002589 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002590 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2591
2592 // Usage.
2593 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2594 allocCreateInfo.requiredFlags = 0;
2595 allocCreateInfo.preferredFlags = 0;
2596 allocCreateInfo.memoryTypeBits = UINT32_MAX;
2597
2598 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(res == VK_SUCCESS);
2600 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002601 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2602
2603 // Required flags, preferred flags.
2604 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
2605 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2606 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2607 allocCreateInfo.memoryTypeBits = 0;
2608
2609 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002610 TEST(res == VK_SUCCESS);
2611 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2612 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002613 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2614
2615 // memoryTypeBits.
2616 const uint32_t memType = allocInfo.memoryType;
2617 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2618 allocCreateInfo.requiredFlags = 0;
2619 allocCreateInfo.preferredFlags = 0;
2620 allocCreateInfo.memoryTypeBits = 1u << memType;
2621
2622 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002623 TEST(res == VK_SUCCESS);
2624 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002625 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2626
2627}
2628
2629static void TestBasics()
2630{
2631 VkResult res;
2632
2633 TestMemoryRequirements();
2634
2635 // Lost allocation
2636 {
2637 VmaAllocation alloc = VK_NULL_HANDLE;
2638 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002639 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002640
2641 VmaAllocationInfo allocInfo;
2642 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002643 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
2644 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002645
2646 vmaFreeMemory(g_hAllocator, alloc);
2647 }
2648
2649 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
2650 {
2651 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2652 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2653 bufCreateInfo.size = 128;
2654
2655 VmaAllocationCreateInfo allocCreateInfo = {};
2656 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2657 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
2658
2659 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2660 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002661 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002662
2663 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2664
2665 // Same with OWN_MEMORY.
2666 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2667
2668 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002670
2671 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2672 }
2673
2674 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01002675
2676 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01002677}
2678
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01002679static void TestPool_MinBlockCount()
2680{
2681#if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
2682 return;
2683#endif
2684
2685 wprintf(L"Test Pool MinBlockCount\n");
2686 VkResult res;
2687
2688 static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
2689 static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
2690
2691 VmaAllocationCreateInfo allocCreateInfo = {};
2692 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
2693
2694 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2695 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2696 bufCreateInfo.size = ALLOC_SIZE;
2697
2698 VmaPoolCreateInfo poolCreateInfo = {};
2699 poolCreateInfo.blockSize = BLOCK_SIZE;
2700 poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
2701 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
2702 TEST(res == VK_SUCCESS);
2703
2704 VmaPool pool = VK_NULL_HANDLE;
2705 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
2706 TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
2707
2708 // Check that there are 2 blocks preallocated as requested.
2709 VmaPoolStats begPoolStats = {};
2710 vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
2711 TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
2712
2713 // Allocate 5 buffers to create 3 blocks.
2714 static const uint32_t BUF_COUNT = 5;
2715 allocCreateInfo.pool = pool;
2716 std::vector<AllocInfo> allocs(BUF_COUNT);
2717 for(uint32_t i = 0; i < BUF_COUNT; ++i)
2718 {
2719 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
2720 TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
2721 }
2722
2723 // Check that there are really 3 blocks.
2724 VmaPoolStats poolStats2 = {};
2725 vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
2726 TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
2727
2728 // Free two first allocations to make one block empty.
2729 allocs[0].Destroy();
2730 allocs[1].Destroy();
2731
2732 // Check that there are still 3 blocks due to hysteresis.
2733 VmaPoolStats poolStats3 = {};
2734 vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
2735 TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats2.size == BLOCK_SIZE * 3);
2736
2737 // Free the last allocation to make second block empty.
2738 allocs[BUF_COUNT - 1].Destroy();
2739
2740 // Check that there are now 2 blocks only.
2741 VmaPoolStats poolStats4 = {};
2742 vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
2743 TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
2744
2745 // Cleanup.
2746 for(size_t i = allocs.size(); i--; )
2747 {
2748 allocs[i].Destroy();
2749 }
2750 vmaDestroyPool(g_hAllocator, pool);
2751}
2752
Adam Sawickib8333fb2018-03-13 16:15:53 +01002753void TestHeapSizeLimit()
2754{
Adam Sawickib3f51102019-11-18 13:05:56 +01002755 const VkDeviceSize HEAP_SIZE_LIMIT = 200ull * 1024 * 1024; // 200 MB
2756 const VkDeviceSize BLOCK_SIZE = 20ull * 1024 * 1024; // 20 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01002757
2758 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
2759 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
2760 {
2761 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
2762 }
2763
2764 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2765 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2766 allocatorCreateInfo.device = g_hDevice;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002767 allocatorCreateInfo.instance = g_hVulkanInstance;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002768 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2769
2770 VmaAllocator hAllocator;
2771 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002772 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002773
2774 struct Item
2775 {
2776 VkBuffer hBuf;
2777 VmaAllocation hAlloc;
2778 };
2779 std::vector<Item> items;
2780
2781 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2782 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2783
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002784 // 1. Allocate two blocks of dedicated memory, half the size of BLOCK_SIZE.
2785 VmaAllocationInfo dedicatedAllocInfo;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002786 {
2787 VmaAllocationCreateInfo allocCreateInfo = {};
2788 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2789 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2790
2791 bufCreateInfo.size = BLOCK_SIZE / 2;
2792
2793 for(size_t i = 0; i < 2; ++i)
2794 {
2795 Item item;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002796 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002797 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002798 items.push_back(item);
2799 }
2800 }
2801
2802 // Create pool to make sure allocations must be out of this memory type.
2803 VmaPoolCreateInfo poolCreateInfo = {};
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002804 poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002805 poolCreateInfo.blockSize = BLOCK_SIZE;
2806
2807 VmaPool hPool;
2808 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002809 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002810
2811 // 2. Allocate normal buffers from all the remaining memory.
2812 {
2813 VmaAllocationCreateInfo allocCreateInfo = {};
2814 allocCreateInfo.pool = hPool;
2815
2816 bufCreateInfo.size = BLOCK_SIZE / 2;
2817
2818 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2819 for(size_t i = 0; i < bufCount; ++i)
2820 {
2821 Item item;
2822 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002823 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002824 items.push_back(item);
2825 }
2826 }
2827
2828 // 3. Allocation of one more (even small) buffer should fail.
2829 {
2830 VmaAllocationCreateInfo allocCreateInfo = {};
2831 allocCreateInfo.pool = hPool;
2832
2833 bufCreateInfo.size = 128;
2834
2835 VkBuffer hBuf;
2836 VmaAllocation hAlloc;
2837 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002838 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002839 }
2840
2841 // Destroy everything.
2842 for(size_t i = items.size(); i--; )
2843 {
2844 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2845 }
2846
2847 vmaDestroyPool(hAllocator, hPool);
2848
2849 vmaDestroyAllocator(hAllocator);
2850}
2851
#if VMA_DEBUG_MARGIN
// Verifies that when VMA_DEBUG_MARGIN is enabled, the allocator keeps at
// least VMA_DEBUG_MARGIN bytes of free space before every allocation
// (including the first one in a block), and that vmaCheckCorruption reports
// no corruption when the margins are untouched.
static void TestDebugMargin()
{
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    // Fixed: loop bound was hard-coded as 10; use BUF_COUNT so the arrays
    // and the loop cannot silently diverge if the constant changes.
    for(size_t i = 0; i < BUF_COUNT; ++i)
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill the mapped allocation with data, staying within its size.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Sort by (deviceMemory, offset) so neighbors in the array are neighbors
    // in memory, then check that offsets preserve the margin between them.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    // No allocation was overwritten past its end, so all margins must be intact.
    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers in reverse order.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002917
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002918static void TestLinearAllocator()
2919{
2920 wprintf(L"Test linear allocator\n");
2921
2922 RandomNumberGenerator rand{645332};
2923
2924 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2925 sampleBufCreateInfo.size = 1024; // Whatever.
2926 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2927
2928 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2929 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2930
2931 VmaPoolCreateInfo poolCreateInfo = {};
2932 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002933 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002934
Adam Sawickiee082772018-06-20 17:45:49 +02002935 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002936 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2937 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2938
2939 VmaPool pool = nullptr;
2940 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002941 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002942
2943 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2944
2945 VmaAllocationCreateInfo allocCreateInfo = {};
2946 allocCreateInfo.pool = pool;
2947
2948 constexpr size_t maxBufCount = 100;
2949 std::vector<BufferInfo> bufInfo;
2950
2951 constexpr VkDeviceSize bufSizeMin = 16;
2952 constexpr VkDeviceSize bufSizeMax = 1024;
2953 VmaAllocationInfo allocInfo;
2954 VkDeviceSize prevOffset = 0;
2955
2956 // Test one-time free.
2957 for(size_t i = 0; i < 2; ++i)
2958 {
2959 // Allocate number of buffers of varying size that surely fit into this block.
2960 VkDeviceSize bufSumSize = 0;
2961 for(size_t i = 0; i < maxBufCount; ++i)
2962 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002963 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002964 BufferInfo newBufInfo;
2965 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2966 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002967 TEST(res == VK_SUCCESS);
2968 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002969 bufInfo.push_back(newBufInfo);
2970 prevOffset = allocInfo.offset;
2971 bufSumSize += bufCreateInfo.size;
2972 }
2973
2974 // Validate pool stats.
2975 VmaPoolStats stats;
2976 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002977 TEST(stats.size == poolCreateInfo.blockSize);
2978 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2979 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002980
2981 // Destroy the buffers in random order.
2982 while(!bufInfo.empty())
2983 {
2984 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2985 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2986 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2987 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2988 }
2989 }
2990
2991 // Test stack.
2992 {
2993 // Allocate number of buffers of varying size that surely fit into this block.
2994 for(size_t i = 0; i < maxBufCount; ++i)
2995 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002996 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002997 BufferInfo newBufInfo;
2998 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2999 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003000 TEST(res == VK_SUCCESS);
3001 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003002 bufInfo.push_back(newBufInfo);
3003 prevOffset = allocInfo.offset;
3004 }
3005
3006 // Destroy few buffers from top of the stack.
3007 for(size_t i = 0; i < maxBufCount / 5; ++i)
3008 {
3009 const BufferInfo& currBufInfo = bufInfo.back();
3010 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3011 bufInfo.pop_back();
3012 }
3013
3014 // Create some more
3015 for(size_t i = 0; i < maxBufCount / 5; ++i)
3016 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003017 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003018 BufferInfo newBufInfo;
3019 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3020 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003021 TEST(res == VK_SUCCESS);
3022 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003023 bufInfo.push_back(newBufInfo);
3024 prevOffset = allocInfo.offset;
3025 }
3026
3027 // Destroy the buffers in reverse order.
3028 while(!bufInfo.empty())
3029 {
3030 const BufferInfo& currBufInfo = bufInfo.back();
3031 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3032 bufInfo.pop_back();
3033 }
3034 }
3035
Adam Sawickiee082772018-06-20 17:45:49 +02003036 // Test ring buffer.
3037 {
3038 // Allocate number of buffers that surely fit into this block.
3039 bufCreateInfo.size = bufSizeMax;
3040 for(size_t i = 0; i < maxBufCount; ++i)
3041 {
3042 BufferInfo newBufInfo;
3043 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3044 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003045 TEST(res == VK_SUCCESS);
3046 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02003047 bufInfo.push_back(newBufInfo);
3048 prevOffset = allocInfo.offset;
3049 }
3050
3051 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
3052 const size_t buffersPerIter = maxBufCount / 10 - 1;
3053 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
3054 for(size_t iter = 0; iter < iterCount; ++iter)
3055 {
3056 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3057 {
3058 const BufferInfo& currBufInfo = bufInfo.front();
3059 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3060 bufInfo.erase(bufInfo.begin());
3061 }
3062 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3063 {
3064 BufferInfo newBufInfo;
3065 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3066 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003067 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02003068 bufInfo.push_back(newBufInfo);
3069 }
3070 }
3071
3072 // Allocate buffers until we reach out-of-memory.
3073 uint32_t debugIndex = 0;
3074 while(res == VK_SUCCESS)
3075 {
3076 BufferInfo newBufInfo;
3077 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3078 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3079 if(res == VK_SUCCESS)
3080 {
3081 bufInfo.push_back(newBufInfo);
3082 }
3083 else
3084 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003085 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02003086 }
3087 ++debugIndex;
3088 }
3089
3090 // Destroy the buffers in random order.
3091 while(!bufInfo.empty())
3092 {
3093 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3094 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3095 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3096 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3097 }
3098 }
3099
Adam Sawicki680b2252018-08-22 14:47:32 +02003100 // Test double stack.
3101 {
3102 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
3103 VkDeviceSize prevOffsetLower = 0;
3104 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
3105 for(size_t i = 0; i < maxBufCount; ++i)
3106 {
3107 const bool upperAddress = (i % 2) != 0;
3108 if(upperAddress)
3109 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3110 else
3111 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003112 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003113 BufferInfo newBufInfo;
3114 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3115 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003116 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003117 if(upperAddress)
3118 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003119 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003120 prevOffsetUpper = allocInfo.offset;
3121 }
3122 else
3123 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003124 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003125 prevOffsetLower = allocInfo.offset;
3126 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003127 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003128 bufInfo.push_back(newBufInfo);
3129 }
3130
3131 // Destroy few buffers from top of the stack.
3132 for(size_t i = 0; i < maxBufCount / 5; ++i)
3133 {
3134 const BufferInfo& currBufInfo = bufInfo.back();
3135 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3136 bufInfo.pop_back();
3137 }
3138
3139 // Create some more
3140 for(size_t i = 0; i < maxBufCount / 5; ++i)
3141 {
3142 const bool upperAddress = (i % 2) != 0;
3143 if(upperAddress)
3144 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3145 else
3146 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003147 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003148 BufferInfo newBufInfo;
3149 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3150 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003151 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003152 bufInfo.push_back(newBufInfo);
3153 }
3154
3155 // Destroy the buffers in reverse order.
3156 while(!bufInfo.empty())
3157 {
3158 const BufferInfo& currBufInfo = bufInfo.back();
3159 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3160 bufInfo.pop_back();
3161 }
3162
3163 // Create buffers on both sides until we reach out of memory.
3164 prevOffsetLower = 0;
3165 prevOffsetUpper = poolCreateInfo.blockSize;
3166 res = VK_SUCCESS;
3167 for(size_t i = 0; res == VK_SUCCESS; ++i)
3168 {
3169 const bool upperAddress = (i % 2) != 0;
3170 if(upperAddress)
3171 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3172 else
3173 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003174 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003175 BufferInfo newBufInfo;
3176 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3177 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3178 if(res == VK_SUCCESS)
3179 {
3180 if(upperAddress)
3181 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003182 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003183 prevOffsetUpper = allocInfo.offset;
3184 }
3185 else
3186 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003187 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003188 prevOffsetLower = allocInfo.offset;
3189 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003190 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003191 bufInfo.push_back(newBufInfo);
3192 }
3193 }
3194
3195 // Destroy the buffers in random order.
3196 while(!bufInfo.empty())
3197 {
3198 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3199 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3200 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3201 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3202 }
3203
3204 // Create buffers on upper side only, constant size, until we reach out of memory.
3205 prevOffsetUpper = poolCreateInfo.blockSize;
3206 res = VK_SUCCESS;
3207 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3208 bufCreateInfo.size = bufSizeMax;
3209 for(size_t i = 0; res == VK_SUCCESS; ++i)
3210 {
3211 BufferInfo newBufInfo;
3212 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3213 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3214 if(res == VK_SUCCESS)
3215 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003216 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003217 prevOffsetUpper = allocInfo.offset;
3218 bufInfo.push_back(newBufInfo);
3219 }
3220 }
3221
3222 // Destroy the buffers in reverse order.
3223 while(!bufInfo.empty())
3224 {
3225 const BufferInfo& currBufInfo = bufInfo.back();
3226 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3227 bufInfo.pop_back();
3228 }
3229 }
3230
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003231 // Test ring buffer with lost allocations.
3232 {
3233 // Allocate number of buffers until pool is full.
3234 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
3235 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
3236 res = VK_SUCCESS;
3237 for(size_t i = 0; res == VK_SUCCESS; ++i)
3238 {
3239 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3240
Adam Sawickifd366b62019-01-24 15:26:43 +01003241 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003242
3243 BufferInfo newBufInfo;
3244 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3245 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3246 if(res == VK_SUCCESS)
3247 bufInfo.push_back(newBufInfo);
3248 }
3249
3250 // Free first half of it.
3251 {
3252 const size_t buffersToDelete = bufInfo.size() / 2;
3253 for(size_t i = 0; i < buffersToDelete; ++i)
3254 {
3255 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3256 }
3257 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
3258 }
3259
3260 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003261 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003262 res = VK_SUCCESS;
3263 for(size_t i = 0; res == VK_SUCCESS; ++i)
3264 {
3265 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3266
Adam Sawickifd366b62019-01-24 15:26:43 +01003267 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003268
3269 BufferInfo newBufInfo;
3270 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3271 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3272 if(res == VK_SUCCESS)
3273 bufInfo.push_back(newBufInfo);
3274 }
3275
3276 VkDeviceSize firstNewOffset;
3277 {
3278 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3279
3280 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
3281 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3282 bufCreateInfo.size = bufSizeMax;
3283
3284 BufferInfo newBufInfo;
3285 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3286 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003287 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003288 bufInfo.push_back(newBufInfo);
3289 firstNewOffset = allocInfo.offset;
3290
3291 // Make sure at least one buffer from the beginning became lost.
3292 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003293 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003294 }
3295
Adam Sawickifd366b62019-01-24 15:26:43 +01003296#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003297 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
3298 size_t newCount = 1;
3299 for(;;)
3300 {
3301 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3302
Adam Sawickifd366b62019-01-24 15:26:43 +01003303 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003304
3305 BufferInfo newBufInfo;
3306 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3307 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01003308
Adam Sawickib8d34d52018-10-03 17:41:20 +02003309 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003310 bufInfo.push_back(newBufInfo);
3311 ++newCount;
3312 if(allocInfo.offset < firstNewOffset)
3313 break;
3314 }
Adam Sawickifd366b62019-01-24 15:26:43 +01003315#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003316
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003317 // Delete buffers that are lost.
3318 for(size_t i = bufInfo.size(); i--; )
3319 {
3320 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3321 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3322 {
3323 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3324 bufInfo.erase(bufInfo.begin() + i);
3325 }
3326 }
3327
3328 // Test vmaMakePoolAllocationsLost
3329 {
3330 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3331
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01003332 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003333 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003334 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003335
3336 size_t realLostAllocCount = 0;
3337 for(size_t i = 0; i < bufInfo.size(); ++i)
3338 {
3339 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3340 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3341 ++realLostAllocCount;
3342 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003343 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003344 }
3345
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003346 // Destroy all the buffers in forward order.
3347 for(size_t i = 0; i < bufInfo.size(); ++i)
3348 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3349 bufInfo.clear();
3350 }
3351
Adam Sawicki70a683e2018-08-24 15:36:32 +02003352 vmaDestroyPool(g_hAllocator, pool);
3353}
Adam Sawickif799c4f2018-08-23 10:40:30 +02003354
// Tests a linear-algorithm pool that is allowed to grow to multiple memory blocks.
// Exercises two usage patterns: "one-time free" (allocate across a block boundary,
// then destroy everything in random order) and "stack" (LIFO destruction),
// verifying via vmaGetPoolStats() that blocks are created and released as expected.
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer used both for memory type selection and as the template
    // for every buffer created below (1 MiB each).
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Note: no maxBlockCount limit here, so the pool may allocate additional
    // blocks - that is exactly what this test relies on.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block.
        // Detected by the returned VkDeviceMemory handle changing between
        // consecutive allocations.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block.
        // (<= 1 rather than == 1: the pool may or may not keep an empty block alive.)
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block: LIFO frees should have allowed
        // the second block to be released and the new buffer to fit in the first.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3489
Adam Sawickifd11d752018-08-22 15:02:10 +02003490static void ManuallyTestLinearAllocator()
3491{
3492 VmaStats origStats;
3493 vmaCalculateStats(g_hAllocator, &origStats);
3494
3495 wprintf(L"Manually test linear allocator\n");
3496
3497 RandomNumberGenerator rand{645332};
3498
3499 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3500 sampleBufCreateInfo.size = 1024; // Whatever.
3501 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
3502
3503 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
3504 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3505
3506 VmaPoolCreateInfo poolCreateInfo = {};
3507 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003508 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003509
3510 poolCreateInfo.blockSize = 10 * 1024;
3511 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3512 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
3513
3514 VmaPool pool = nullptr;
3515 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003516 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003517
3518 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
3519
3520 VmaAllocationCreateInfo allocCreateInfo = {};
3521 allocCreateInfo.pool = pool;
3522
3523 std::vector<BufferInfo> bufInfo;
3524 VmaAllocationInfo allocInfo;
3525 BufferInfo newBufInfo;
3526
3527 // Test double stack.
3528 {
3529 /*
3530 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
3531 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
3532
3533 Totally:
3534 1 block allocated
3535 10240 Vulkan bytes
3536 6 new allocations
3537 2256 bytes in allocations
3538 */
3539
3540 bufCreateInfo.size = 32;
3541 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3542 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003543 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003544 bufInfo.push_back(newBufInfo);
3545
3546 bufCreateInfo.size = 1024;
3547 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3548 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003549 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003550 bufInfo.push_back(newBufInfo);
3551
3552 bufCreateInfo.size = 32;
3553 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3554 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003555 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003556 bufInfo.push_back(newBufInfo);
3557
3558 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3559
3560 bufCreateInfo.size = 128;
3561 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3562 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003563 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003564 bufInfo.push_back(newBufInfo);
3565
3566 bufCreateInfo.size = 1024;
3567 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3568 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003569 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003570 bufInfo.push_back(newBufInfo);
3571
3572 bufCreateInfo.size = 16;
3573 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3574 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003575 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02003576 bufInfo.push_back(newBufInfo);
3577
3578 VmaStats currStats;
3579 vmaCalculateStats(g_hAllocator, &currStats);
3580 VmaPoolStats poolStats;
3581 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
3582
3583 char* statsStr = nullptr;
3584 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
3585
3586 // PUT BREAKPOINT HERE TO CHECK.
3587 // Inspect: currStats versus origStats, poolStats, statsStr.
3588 int I = 0;
3589
3590 vmaFreeStatsString(g_hAllocator, statsStr);
3591
3592 // Destroy the buffers in reverse order.
3593 while(!bufInfo.empty())
3594 {
3595 const BufferInfo& currBufInfo = bufInfo.back();
3596 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3597 bufInfo.pop_back();
3598 }
3599 }
3600
3601 vmaDestroyPool(g_hAllocator, pool);
3602}
3603
// Benchmarks one configuration of pool algorithm / emptiness / allocation
// strategy / free order. Creates a dedicated single-block pool, optionally
// pre-fragments it, then measures wall-clock time of allocCount allocations
// and frees over iterationCount iterations. Results are printed to stdout and,
// if `file` is not null, appended as a CSV row.
//
// algorithm     - VmaPoolCreateFlagBits algorithm bit, or 0 for the default.
// empty         - if false, the pool is pre-fragmented before measuring.
// allocStrategy - VMA_ALLOCATION_CREATE_STRATEGY_* bits to benchmark.
// freeOrder     - order in which the measured allocations are freed.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single block sized for the worst case, so no block creation/destruction
    // pollutes the timings.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly - leaves the pool fragmented.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations: reorder the vector first (outside the timed section)
        // so only the frees themselves are measured.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
3755
Adam Sawicki80927152018-09-07 17:27:23 +02003756static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02003757{
Adam Sawicki80927152018-09-07 17:27:23 +02003758 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02003759
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003760 if(file)
3761 {
3762 fprintf(file,
3763 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003764 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003765 "Allocation time (s),Deallocation time (s)\n");
3766 }
3767
Adam Sawicki0a607132018-08-24 11:18:41 +02003768 uint32_t freeOrderCount = 1;
3769 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3770 freeOrderCount = 3;
3771 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3772 freeOrderCount = 2;
3773
3774 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003775 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003776
3777 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3778 {
3779 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3780 switch(freeOrderIndex)
3781 {
3782 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3783 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3784 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3785 default: assert(0);
3786 }
3787
3788 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3789 {
Adam Sawicki80927152018-09-07 17:27:23 +02003790 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003791 {
Adam Sawicki80927152018-09-07 17:27:23 +02003792 uint32_t algorithm = 0;
3793 switch(algorithmIndex)
3794 {
3795 case 0:
3796 break;
3797 case 1:
3798 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3799 break;
3800 case 2:
3801 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3802 break;
3803 default:
3804 assert(0);
3805 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003806
Adam Sawicki80927152018-09-07 17:27:23 +02003807 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003808 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3809 {
3810 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003811 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003812 {
3813 switch(allocStrategyIndex)
3814 {
3815 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3816 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3817 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3818 default: assert(0);
3819 }
3820 }
3821
Adam Sawicki80927152018-09-07 17:27:23 +02003822 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003823 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003824 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003825 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003826 strategy,
3827 freeOrder); // freeOrder
3828 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003829 }
3830 }
3831 }
3832}
3833
Adam Sawickib8333fb2018-03-13 16:15:53 +01003834static void TestPool_SameSize()
3835{
3836 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3837 const size_t BUF_COUNT = 100;
3838 VkResult res;
3839
3840 RandomNumberGenerator rand{123};
3841
3842 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3843 bufferInfo.size = BUF_SIZE;
3844 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3845
3846 uint32_t memoryTypeBits = UINT32_MAX;
3847 {
3848 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003849 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003850 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003851
3852 VkMemoryRequirements memReq;
3853 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3854 memoryTypeBits = memReq.memoryTypeBits;
3855
Adam Sawicki1f84f622019-07-02 13:40:01 +02003856 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003857 }
3858
3859 VmaAllocationCreateInfo poolAllocInfo = {};
3860 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3861 uint32_t memTypeIndex;
3862 res = vmaFindMemoryTypeIndex(
3863 g_hAllocator,
3864 memoryTypeBits,
3865 &poolAllocInfo,
3866 &memTypeIndex);
3867
3868 VmaPoolCreateInfo poolCreateInfo = {};
3869 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3870 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3871 poolCreateInfo.minBlockCount = 1;
3872 poolCreateInfo.maxBlockCount = 4;
3873 poolCreateInfo.frameInUseCount = 0;
3874
3875 VmaPool pool;
3876 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003877 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003878
Adam Sawickia020fb82019-11-02 14:43:06 +01003879 // Test pool name
3880 {
3881 static const char* const POOL_NAME = "Pool name";
3882 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3883
3884 const char* fetchedPoolName = nullptr;
3885 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3886 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3887
Adam Sawickia020fb82019-11-02 14:43:06 +01003888 vmaSetPoolName(g_hAllocator, pool, nullptr);
3889 }
3890
Adam Sawickib8333fb2018-03-13 16:15:53 +01003891 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3892
3893 VmaAllocationCreateInfo allocInfo = {};
3894 allocInfo.pool = pool;
3895 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3896 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3897
3898 struct BufItem
3899 {
3900 VkBuffer Buf;
3901 VmaAllocation Alloc;
3902 };
3903 std::vector<BufItem> items;
3904
3905 // Fill entire pool.
3906 for(size_t i = 0; i < BUF_COUNT; ++i)
3907 {
3908 BufItem item;
3909 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003910 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003911 items.push_back(item);
3912 }
3913
3914 // Make sure that another allocation would fail.
3915 {
3916 BufItem item;
3917 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003918 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003919 }
3920
3921 // Validate that no buffer is lost. Also check that they are not mapped.
3922 for(size_t i = 0; i < items.size(); ++i)
3923 {
3924 VmaAllocationInfo allocInfo;
3925 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003926 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3927 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003928 }
3929
3930 // Free some percent of random items.
3931 {
3932 const size_t PERCENT_TO_FREE = 10;
3933 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3934 for(size_t i = 0; i < itemsToFree; ++i)
3935 {
3936 size_t index = (size_t)rand.Generate() % items.size();
3937 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3938 items.erase(items.begin() + index);
3939 }
3940 }
3941
3942 // Randomly allocate and free items.
3943 {
3944 const size_t OPERATION_COUNT = BUF_COUNT;
3945 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3946 {
3947 bool allocate = rand.Generate() % 2 != 0;
3948 if(allocate)
3949 {
3950 if(items.size() < BUF_COUNT)
3951 {
3952 BufItem item;
3953 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003954 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003955 items.push_back(item);
3956 }
3957 }
3958 else // Free
3959 {
3960 if(!items.empty())
3961 {
3962 size_t index = (size_t)rand.Generate() % items.size();
3963 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3964 items.erase(items.begin() + index);
3965 }
3966 }
3967 }
3968 }
3969
3970 // Allocate up to maximum.
3971 while(items.size() < BUF_COUNT)
3972 {
3973 BufItem item;
3974 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003975 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003976 items.push_back(item);
3977 }
3978
3979 // Validate that no buffer is lost.
3980 for(size_t i = 0; i < items.size(); ++i)
3981 {
3982 VmaAllocationInfo allocInfo;
3983 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003984 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003985 }
3986
3987 // Next frame.
3988 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3989
3990 // Allocate another BUF_COUNT buffers.
3991 for(size_t i = 0; i < BUF_COUNT; ++i)
3992 {
3993 BufItem item;
3994 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003995 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003996 items.push_back(item);
3997 }
3998
3999 // Make sure the first BUF_COUNT is lost. Delete them.
4000 for(size_t i = 0; i < BUF_COUNT; ++i)
4001 {
4002 VmaAllocationInfo allocInfo;
4003 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004004 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004005 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4006 }
4007 items.erase(items.begin(), items.begin() + BUF_COUNT);
4008
4009 // Validate that no buffer is lost.
4010 for(size_t i = 0; i < items.size(); ++i)
4011 {
4012 VmaAllocationInfo allocInfo;
4013 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004014 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004015 }
4016
4017 // Free one item.
4018 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
4019 items.pop_back();
4020
4021 // Validate statistics.
4022 {
4023 VmaPoolStats poolStats = {};
4024 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004025 TEST(poolStats.allocationCount == items.size());
4026 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
4027 TEST(poolStats.unusedRangeCount == 1);
4028 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
4029 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004030 }
4031
4032 // Free all remaining items.
4033 for(size_t i = items.size(); i--; )
4034 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4035 items.clear();
4036
4037 // Allocate maximum items again.
4038 for(size_t i = 0; i < BUF_COUNT; ++i)
4039 {
4040 BufItem item;
4041 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004042 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004043 items.push_back(item);
4044 }
4045
4046 // Delete every other item.
4047 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
4048 {
4049 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4050 items.erase(items.begin() + i);
4051 }
4052
4053 // Defragment!
4054 {
4055 std::vector<VmaAllocation> allocationsToDefragment(items.size());
4056 for(size_t i = 0; i < items.size(); ++i)
4057 allocationsToDefragment[i] = items[i].Alloc;
4058
4059 VmaDefragmentationStats defragmentationStats;
4060 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004061 TEST(res == VK_SUCCESS);
4062 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004063 }
4064
4065 // Free all remaining items.
4066 for(size_t i = items.size(); i--; )
4067 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4068 items.clear();
4069
4070 ////////////////////////////////////////////////////////////////////////////////
4071 // Test for vmaMakePoolAllocationsLost
4072
4073 // Allocate 4 buffers on frame 10.
4074 vmaSetCurrentFrameIndex(g_hAllocator, 10);
4075 for(size_t i = 0; i < 4; ++i)
4076 {
4077 BufItem item;
4078 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004079 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004080 items.push_back(item);
4081 }
4082
4083 // Touch first 2 of them on frame 11.
4084 vmaSetCurrentFrameIndex(g_hAllocator, 11);
4085 for(size_t i = 0; i < 2; ++i)
4086 {
4087 VmaAllocationInfo allocInfo;
4088 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
4089 }
4090
4091 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
4092 size_t lostCount = 0xDEADC0DE;
4093 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004094 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004095
4096 // Make another call. Now 0 should be lost.
4097 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004098 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004099
4100 // Make another call, with null count. Should not crash.
4101 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
4102
4103 // END: Free all remaining items.
4104 for(size_t i = items.size(); i--; )
4105 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4106
4107 items.clear();
4108
Adam Sawickid2924172018-06-11 12:48:46 +02004109 ////////////////////////////////////////////////////////////////////////////////
4110 // Test for allocation too large for pool
4111
4112 {
4113 VmaAllocationCreateInfo allocCreateInfo = {};
4114 allocCreateInfo.pool = pool;
4115
4116 VkMemoryRequirements memReq;
4117 memReq.memoryTypeBits = UINT32_MAX;
4118 memReq.alignment = 1;
4119 memReq.size = poolCreateInfo.blockSize + 4;
4120
4121 VmaAllocation alloc = nullptr;
4122 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004123 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02004124 }
4125
Adam Sawickib8333fb2018-03-13 16:15:53 +01004126 vmaDestroyPool(g_hAllocator, pool);
4127}
4128
// Returns true if every byte in the memory region [pMemory, pMemory + size)
// equals `pattern`. An empty region (size == 0) trivially validates.
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* p = (const uint8_t*)pMemory;
    const uint8_t* const pEnd = p + size;
    while(p != pEnd)
    {
        if(*p++ != pattern)
            return false;
    }
    return true;
}
4141
// Tests that new allocations come filled with byte pattern 0xDC and freed
// allocations are overwritten with 0xEF, by mapping host-visible memory and
// inspecting it before and after destroying a buffer.
// NOTE(review): presumably only meaningful when the library is compiled with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled - confirm against build settings.
static void TestAllocationsInitialization()
{
    VkResult res;

    const size_t BUF_SIZE = 1024;

    // Create pool.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = BUF_SIZE;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // CPU_ONLY so the memory is host-visible and can be mapped below.
    VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
    dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Exactly one block, big enough for all buffers created in this test.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BUF_SIZE * 10;
    poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
    poolCreateInfo.maxBlockCount = 1;
    res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaAllocationCreateInfo bufAllocCreateInfo = {};
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
    TEST(res == VK_SUCCESS);

    // Create one persistently mapped buffer to keep memory of this block mapped,
    // so that pointer to mapped data will remain (more or less...) valid even
    // after destruction of other allocations.

    bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    VkBuffer firstBuf;
    VmaAllocation firstAlloc;
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
    TEST(res == VK_SUCCESS);

    // Test buffers.

    // Iteration 0 uses persistent mapping; iteration 1 maps/unmaps explicitly.
    for(uint32_t i = 0; i < 2; ++i)
    {
        const bool persistentlyMapped = i == 0;
        bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
        VkBuffer buf;
        VmaAllocation alloc;
        VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        void* pMappedData;
        if(!persistentlyMapped)
        {
            res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
            TEST(res == VK_SUCCESS);
        }
        else
        {
            // Persistently mapped allocation exposes its pointer directly.
            pMappedData = allocInfo.pMappedData;
        }

        // Validate initialized content
        bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
        TEST(valid);

        if(!persistentlyMapped)
        {
            vmaUnmapMemory(g_hAllocator, alloc);
        }

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Validate freed content
        // Deliberately reads pMappedData after the buffer was destroyed; this is
        // only tolerable because firstBuf keeps the whole block alive and mapped.
        valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
        TEST(valid);
    }

    vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
    vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
}
4220
// Multi-threaded benchmark of a single VmaPool (one block of config.PoolSize).
// Each of config.ThreadCount worker threads owns a private set of buffer/image
// items and, per frame, randomly shuffles which items are "used", touching or
// (re)allocating them with CAN_BECOME_LOST semantics. Frames are synchronized
// with the main thread via Win32 auto-reset events. Per-thread timing and
// lost/failed counters are aggregated into outResult at the end.
// NOTE(review): `res`, `bufferInfo` and `imageInfo` are captured by reference
// and written concurrently by all worker threads inside ThreadProc/Allocate -
// looks like a benign-by-luck data race; confirm before relying on `res`.
static void TestPool_Benchmark(
    PoolTestResult& outResult,
    const PoolTestConfig& config)
{
    TEST(config.ThreadCount > 0);

    RandomNumberGenerator mainRand{config.RandSeed};

    // Sum of probabilities, used as modulus when picking a random size bucket.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // Template create-infos; size/extent are overwritten per item in Allocate.
    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 256; // Whatever.
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = 256; // Whatever.
    imageInfo.extent.height = 256; // Whatever.
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    // Probe memory type bits using throw-away dummy objects.
    uint32_t bufferMemoryTypeBits = UINT32_MAX;
    {
        VkBuffer dummyBuffer;
        VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
        bufferMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
    }

    uint32_t imageMemoryTypeBits = UINT32_MAX;
    {
        VkImage dummyImage;
        VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
        imageMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
    }

    // When mixing buffers and images in one pool, they must share a memory type.
    uint32_t memoryTypeBits = 0;
    if(config.UsesBuffers() && config.UsesImages())
    {
        memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
        if(memoryTypeBits == 0)
        {
            PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
            return;
        }
    }
    else if(config.UsesBuffers())
        memoryTypeBits = bufferMemoryTypeBits;
    else if(config.UsesImages())
        memoryTypeBits = imageMemoryTypeBits;
    else
        TEST(0);

    // Single fixed-size block; frameInUseCount = 1 so allocations from the
    // previous frame are not considered lost yet.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = 0;
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 1;
    poolCreateInfo.blockSize = config.PoolSize;
    poolCreateInfo.frameInUseCount = 1;

    VmaAllocationCreateInfo dummyAllocCreateInfo = {};
    dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);

    VmaPool pool;
    VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Start time measurement - after creating pool and initializing data structures.
    time_point timeBeg = std::chrono::high_resolution_clock::now();

    ////////////////////////////////////////////////////////////////////////////////
    // ThreadProc
    // Body executed by every worker thread. frameStartEvent/frameEndEvent pair
    // synchronizes each frame with the main thread's loop below.
    auto ThreadProc = [&](
        PoolTestThreadResult* outThreadResult,
        uint32_t randSeed,
        HANDLE frameStartEvent,
        HANDLE frameEndEvent) -> void
    {
        RandomNumberGenerator threadRand{randSeed};

        // Initialize per-thread result accumulators.
        outThreadResult->AllocationTimeMin = duration::max();
        outThreadResult->AllocationTimeSum = duration::zero();
        outThreadResult->AllocationTimeMax = duration::min();
        outThreadResult->DeallocationTimeMin = duration::max();
        outThreadResult->DeallocationTimeSum = duration::zero();
        outThreadResult->DeallocationTimeMax = duration::min();
        outThreadResult->AllocationCount = 0;
        outThreadResult->DeallocationCount = 0;
        outThreadResult->LostAllocationCount = 0;
        outThreadResult->LostAllocationTotalSize = 0;
        outThreadResult->FailedAllocationCount = 0;
        outThreadResult->FailedAllocationTotalSize = 0;

        // One benchmark item: either a buffer (BufferSize != 0) or an image.
        struct Item
        {
            VkDeviceSize BufferSize;
            VkExtent2D ImageSize;
            VkBuffer Buf;
            VkImage Image;
            VmaAllocation Alloc;

            VkDeviceSize CalcSizeBytes() const
            {
                return BufferSize +
                    ImageSize.width * ImageSize.height * 4;
            }
        };
        std::vector<Item> unusedItems, usedItems;

        const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;

        // Create all items - all unused, not yet allocated.
        for(size_t i = 0; i < threadTotalItemCount; ++i)
        {
            Item item = {};

            // Pick a size bucket with probability proportional to Probability.
            uint32_t allocSizeIndex = 0;
            uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
            while(r >= config.AllocationSizes[allocSizeIndex].Probability)
                r -= config.AllocationSizes[allocSizeIndex++].Probability;

            const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
            if(allocSize.BufferSizeMax > 0)
            {
                TEST(allocSize.BufferSizeMin > 0);
                TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
                if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                    item.BufferSize = allocSize.BufferSizeMin;
                else
                {
                    // Round down to a multiple of 16 bytes.
                    item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                    item.BufferSize = item.BufferSize / 16 * 16;
                }
            }
            else
            {
                TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
                if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                    item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
                else
                {
                    item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                    item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                }
            }

            unusedItems.push_back(item);
        }

        // Creates the Vulkan object + allocation for one item, timing the call.
        // NOTE(review): mutates the shared bufferInfo/imageInfo templates - see
        // the data-race note on the function header.
        auto Allocate = [&](Item& item) -> VkResult
        {
            VmaAllocationCreateInfo allocCreateInfo = {};
            allocCreateInfo.pool = pool;
            allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
                VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

            if(item.BufferSize)
            {
                bufferInfo.size = item.BufferSize;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
            }
            else
            {
                TEST(item.ImageSize.width && item.ImageSize.height);

                imageInfo.extent.width = item.ImageSize.width;
                imageInfo.extent.height = item.ImageSize.height;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
            }
        };

        ////////////////////////////////////////////////////////////////////////////////
        // Frames
        for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
        {
            // Block until the main thread signals the start of this frame.
            WaitForSingleObject(frameStartEvent, INFINITE);

            // Always make some percent of used bufs unused, to choose different used ones.
            const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
            for(size_t i = 0; i < bufsToMakeUnused; ++i)
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }

            // Determine which bufs we want to use in this frame.
            const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
                / config.ThreadCount;
            TEST(usedBufCount < usedItems.size() + unusedItems.size());
            // Move some used to unused.
            while(usedBufCount < usedItems.size())
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }
            // Move some unused to used.
            while(usedBufCount > usedItems.size())
            {
                size_t index = threadRand.Generate() % unusedItems.size();
                usedItems.push_back(unusedItems[index]);
                unusedItems.erase(unusedItems.begin() + index);
            }

            uint32_t touchExistingCount = 0;
            uint32_t touchLostCount = 0;
            uint32_t createSucceededCount = 0;
            uint32_t createFailedCount = 0;

            // Touch all used bufs. If not created or lost, allocate.
            for(size_t i = 0; i < usedItems.size(); ++i)
            {
                Item& item = usedItems[i];
                // Not yet created.
                if(item.Alloc == VK_NULL_HANDLE)
                {
                    res = Allocate(item);
                    ++outThreadResult->AllocationCount;
                    if(res != VK_SUCCESS)
                    {
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;
                        ++outThreadResult->FailedAllocationCount;
                        outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                        ++createFailedCount;
                    }
                    else
                        ++createSucceededCount;
                }
                else
                {
                    // Touch.
                    VmaAllocationInfo allocInfo;
                    vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
                    // Lost.
                    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
                    {
                        ++touchLostCount;

                        // Destroy.
                        {
                            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                            if(item.Buf)
                                vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
                            ++outThreadResult->DeallocationCount;
                        }
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;

                        ++outThreadResult->LostAllocationCount;
                        outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();

                        // Recreate.
                        res = Allocate(item);
                        ++outThreadResult->AllocationCount;
                        // Creation failed.
                        if(res != VK_SUCCESS)
                        {
                            ++outThreadResult->FailedAllocationCount;
                            outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                            ++createFailedCount;
                        }
                        else
                            ++createSucceededCount;
                    }
                    else
                        ++touchExistingCount;
                }
            }

            /*
            printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
                randSeed, frameIndex,
                touchExistingCount, touchLostCount,
                createSucceededCount, createFailedCount);
            */

            // Tell the main thread this frame is done.
            SetEvent(frameEndEvent);
        }

        // Free all remaining items.
        for(size_t i = usedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
            if(usedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
        for(size_t i = unusedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
            if(unusedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
    };

    // Launch threads.
    uint32_t threadRandSeed = mainRand.Generate();
    // Auto-reset Win32 events: one start/end pair per worker thread.
    std::vector<HANDLE> frameStartEvents{config.ThreadCount};
    std::vector<HANDLE> frameEndEvents{config.ThreadCount};
    std::vector<std::thread> bkgThreads;
    std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
    for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        bkgThreads.emplace_back(std::bind(
            ThreadProc,
            &threadResults[threadIndex],
            threadRandSeed + threadIndex,
            frameStartEvents[threadIndex],
            frameEndEvents[threadIndex]));
    }

    // Execute frames.
    // WaitForMultipleObjects can wait on at most MAXIMUM_WAIT_OBJECTS handles.
    TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
    for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
    {
        vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
        for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
            SetEvent(frameStartEvents[threadIndex]);
        WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
    }

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
    {
        bkgThreads[i].join();
        CloseHandle(frameEndEvents[i]);
        CloseHandle(frameStartEvents[i]);
    }
    bkgThreads.clear();

    // Finish time measurement - before destroying pool.
    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    vmaDestroyPool(g_hAllocator, pool);

    // Aggregate per-thread results: min/max across threads, sums for averages.
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.LostAllocationCount = 0;
    outResult.LostAllocationTotalSize = 0;
    outResult.FailedAllocationCount = 0;
    outResult.FailedAllocationTotalSize = 0;
    size_t allocationCount = 0;
    size_t deallocationCount = 0;
    for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        const PoolTestThreadResult& threadResult = threadResults[threadIndex];
        outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
        outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
        outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
        outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
        outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
        outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
        allocationCount += threadResult.AllocationCount;
        deallocationCount += threadResult.DeallocationCount;
        outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
        outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
        outResult.LostAllocationCount += threadResult.LostAllocationCount;
        outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
    }
    // Convert accumulated sums into averages, guarding against division by zero.
    if(allocationCount)
        outResult.AllocationTimeAvg /= allocationCount;
    if(deallocationCount)
        outResult.DeallocationTimeAvg /= deallocationCount;
}
4626
// Returns true if the half-open byte ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) overlap. Regions starting at the same address are
// always considered overlapping, even when both are empty.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Order the regions so that lo starts first; they overlap iff lo's end
    // extends past hi's beginning.
    const bool firstIsLo = ptr1 < ptr2;
    char* const loBeg = firstIsLo ? ptr1 : ptr2;
    const size_t loSize = firstIsLo ? size1 : size2;
    char* const hiBeg = firstIsLo ? ptr2 : ptr1;
    return loBeg + loSize > hiBeg;
}
4636
Adam Sawickiefa88c42019-11-18 16:33:56 +01004637static void TestMemoryUsage()
4638{
4639 wprintf(L"Testing memory usage:\n");
4640
Adam Sawicki69185552019-11-18 17:03:34 +01004641 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004642 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
4643 {
4644 switch(usage)
4645 {
4646 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
4647 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
4648 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
4649 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
4650 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
4651 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01004652 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004653 default: assert(0);
4654 }
4655
4656 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
4657 {
4658 if(res == VK_SUCCESS)
4659 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
4660 else
4661 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
4662 };
4663
4664 // 1: Buffer for copy
4665 {
4666 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4667 bufCreateInfo.size = 65536;
4668 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4669
4670 VkBuffer buf = VK_NULL_HANDLE;
4671 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4672 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4673
4674 VkMemoryRequirements memReq = {};
4675 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4676
4677 VmaAllocationCreateInfo allocCreateInfo = {};
4678 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4679 VmaAllocation alloc = VK_NULL_HANDLE;
4680 VmaAllocationInfo allocInfo = {};
4681 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4682 if(res == VK_SUCCESS)
4683 {
4684 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4685 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4686 TEST(res == VK_SUCCESS);
4687 }
4688 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4689 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4690 }
4691
4692 // 2: Vertex buffer
4693 {
4694 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4695 bufCreateInfo.size = 65536;
4696 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4697
4698 VkBuffer buf = VK_NULL_HANDLE;
4699 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4700 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4701
4702 VkMemoryRequirements memReq = {};
4703 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4704
4705 VmaAllocationCreateInfo allocCreateInfo = {};
4706 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4707 VmaAllocation alloc = VK_NULL_HANDLE;
4708 VmaAllocationInfo allocInfo = {};
4709 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4710 if(res == VK_SUCCESS)
4711 {
4712 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4713 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4714 TEST(res == VK_SUCCESS);
4715 }
4716 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
4717 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4718 }
4719
4720 // 3: Image for copy, OPTIMAL
4721 {
4722 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4723 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4724 imgCreateInfo.extent.width = 256;
4725 imgCreateInfo.extent.height = 256;
4726 imgCreateInfo.extent.depth = 1;
4727 imgCreateInfo.mipLevels = 1;
4728 imgCreateInfo.arrayLayers = 1;
4729 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4730 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4731 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4732 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
4733 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4734
4735 VkImage img = VK_NULL_HANDLE;
4736 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4737 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4738
4739 VkMemoryRequirements memReq = {};
4740 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4741
4742 VmaAllocationCreateInfo allocCreateInfo = {};
4743 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4744 VmaAllocation alloc = VK_NULL_HANDLE;
4745 VmaAllocationInfo allocInfo = {};
4746 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4747 if(res == VK_SUCCESS)
4748 {
4749 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4750 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4751 TEST(res == VK_SUCCESS);
4752 }
4753 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4754
4755 vmaDestroyImage(g_hAllocator, img, alloc);
4756 }
4757
4758 // 4: Image SAMPLED, OPTIMAL
4759 {
4760 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4761 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4762 imgCreateInfo.extent.width = 256;
4763 imgCreateInfo.extent.height = 256;
4764 imgCreateInfo.extent.depth = 1;
4765 imgCreateInfo.mipLevels = 1;
4766 imgCreateInfo.arrayLayers = 1;
4767 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4768 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4769 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4770 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4771 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4772
4773 VkImage img = VK_NULL_HANDLE;
4774 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4775 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4776
4777 VkMemoryRequirements memReq = {};
4778 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4779
4780 VmaAllocationCreateInfo allocCreateInfo = {};
4781 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4782 VmaAllocation alloc = VK_NULL_HANDLE;
4783 VmaAllocationInfo allocInfo = {};
4784 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4785 if(res == VK_SUCCESS)
4786 {
4787 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4788 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4789 TEST(res == VK_SUCCESS);
4790 }
4791 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4792 vmaDestroyImage(g_hAllocator, img, alloc);
4793 }
4794
4795 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4796 {
4797 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4798 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4799 imgCreateInfo.extent.width = 256;
4800 imgCreateInfo.extent.height = 256;
4801 imgCreateInfo.extent.depth = 1;
4802 imgCreateInfo.mipLevels = 1;
4803 imgCreateInfo.arrayLayers = 1;
4804 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4805 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4806 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4807 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4808 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4809
4810 VkImage img = VK_NULL_HANDLE;
4811 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4812 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4813
4814 VkMemoryRequirements memReq = {};
4815 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4816
4817 VmaAllocationCreateInfo allocCreateInfo = {};
4818 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4819 VmaAllocation alloc = VK_NULL_HANDLE;
4820 VmaAllocationInfo allocInfo = {};
4821 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4822 if(res == VK_SUCCESS)
4823 {
4824 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4825 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4826 TEST(res == VK_SUCCESS);
4827 }
4828 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4829 vmaDestroyImage(g_hAllocator, img, alloc);
4830 }
4831 }
4832}
4833
Adam Sawicki50882502020-02-07 16:51:31 +01004834static uint32_t FindDeviceCoherentMemoryTypeBits()
4835{
4836 VkPhysicalDeviceMemoryProperties memProps;
4837 vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);
4838
4839 uint32_t memTypeBits = 0;
4840 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
4841 {
4842 if(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
4843 memTypeBits |= 1u << i;
4844 }
4845 return memTypeBits;
4846}
4847
4848static void TestDeviceCoherentMemory()
4849{
4850 if(!VK_AMD_device_coherent_memory_enabled)
4851 return;
4852
4853 uint32_t deviceCoherentMemoryTypeBits = FindDeviceCoherentMemoryTypeBits();
4854 // Extension is enabled, feature is enabled, and the device still doesn't support any such memory type?
4855 // OK then, so it's just fake!
4856 if(deviceCoherentMemoryTypeBits == 0)
4857 return;
4858
4859 wprintf(L"Testing device coherent memory...\n");
4860
4861 // 1. Try to allocate buffer from a memory type that is DEVICE_COHERENT.
4862
4863 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4864 bufCreateInfo.size = 0x10000;
4865 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4866
4867 VmaAllocationCreateInfo allocCreateInfo = {};
4868 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4869 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD;
4870
4871 AllocInfo alloc = {};
4872 VmaAllocationInfo allocInfo = {};
4873 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4874
4875 // Make sure it succeeded and was really created in such memory type.
4876 TEST(res == VK_SUCCESS);
4877 TEST((1u << allocInfo.memoryType) & deviceCoherentMemoryTypeBits);
4878
4879 alloc.Destroy();
4880
4881 // 2. Try to create a pool in such memory type.
4882 {
4883 VmaPoolCreateInfo poolCreateInfo = {};
4884
4885 res = vmaFindMemoryTypeIndex(g_hAllocator, UINT32_MAX, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4886 TEST(res == VK_SUCCESS);
4887 TEST((1u << poolCreateInfo.memoryTypeIndex) & deviceCoherentMemoryTypeBits);
4888
4889 VmaPool pool = VK_NULL_HANDLE;
4890 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4891 TEST(res == VK_SUCCESS);
4892
4893 vmaDestroyPool(g_hAllocator, pool);
4894 }
4895
4896 // 3. Try the same with a local allocator created without VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT.
4897
4898 VmaAllocatorCreateInfo allocatorCreateInfo = {};
4899 SetAllocatorCreateInfo(allocatorCreateInfo);
4900 allocatorCreateInfo.flags &= ~VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
4901
4902 VmaAllocator localAllocator = VK_NULL_HANDLE;
4903 res = vmaCreateAllocator(&allocatorCreateInfo, &localAllocator);
4904 TEST(res == VK_SUCCESS && localAllocator);
4905
4906 res = vmaCreateBuffer(localAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4907
4908 // Make sure it failed.
4909 TEST(res != VK_SUCCESS && !alloc.m_Buffer && !alloc.m_Allocation);
4910
4911 // 4. Try to find memory type.
4912 {
4913 uint32_t memTypeIndex = UINT_MAX;
4914 res = vmaFindMemoryTypeIndex(localAllocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
4915 TEST(res != VK_SUCCESS);
4916 }
4917
4918 vmaDestroyAllocator(localAllocator);
4919}
4920
Adam Sawicki40ffe982019-10-11 15:56:02 +02004921static void TestBudget()
4922{
4923 wprintf(L"Testing budget...\n");
4924
Adam Sawicki353e3672019-11-02 14:12:05 +01004925 static const VkDeviceSize BUF_SIZE = 100ull * 1024 * 1024;
4926 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004927
4928 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
4929 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004930 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
4931
4932 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
4933 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004934
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01004935 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4936 {
4937 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
4938 }
4939
Adam Sawicki40ffe982019-10-11 15:56:02 +02004940 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4941 bufInfo.size = BUF_SIZE;
4942 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4943
4944 VmaAllocationCreateInfo allocCreateInfo = {};
4945 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4946 if(testIndex == 0)
4947 {
4948 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4949 }
4950
4951 // CREATE BUFFERS
4952 uint32_t heapIndex = 0;
4953 BufferInfo bufInfos[BUF_COUNT] = {};
4954 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
4955 {
4956 VmaAllocationInfo allocInfo;
4957 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4958 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
4959 TEST(res == VK_SUCCESS);
4960 if(bufIndex == 0)
4961 {
4962 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
4963 }
4964 else
4965 {
4966 // All buffers need to fall into the same heap.
4967 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
4968 }
4969 }
4970
Adam Sawicki353e3672019-11-02 14:12:05 +01004971 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
4972 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004973
4974 // DESTROY BUFFERS
4975 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
4976 {
4977 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
4978 }
4979
Adam Sawicki353e3672019-11-02 14:12:05 +01004980 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
4981 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004982
4983 // CHECK
4984 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4985 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004986 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004987 if(i == heapIndex)
4988 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004989 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
4990 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
4991 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004992 }
4993 else
4994 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004995 TEST(budgetEnd[i].allocationBytes == budgetEnd[i].allocationBytes &&
4996 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
4997 TEST(budgetEnd[i].blockBytes == budgetEnd[i].blockBytes &&
4998 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004999 }
5000 }
5001 }
5002}
5003
// Tests vmaMapMemory/vmaUnmapMemory reference counting and VMA_ALLOCATION_CREATE_MAPPED_BIT,
// in three variants: default allocations, allocations from a custom pool, and dedicated
// allocations. Verifies that:
//  - a second map of the same allocation returns the same pointer,
//  - pMappedData becomes null only after the matching number of unmaps,
//  - a persistently mapped allocation stays mapped after an extra map/unmap pair.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type of the buffers created in the first (TEST_NORMAL) iteration;
    // reused to build the custom pool in the TEST_POOL iteration.
    uint32_t memTypeIndex = UINT32_MAX;

    // NOTE(review): `enum TEST` shares its name with the TEST() assertion macro;
    // the function-like macro only expands before '(', so this compiles — presumably intentional.
    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Relies on the TEST_NORMAL iteration having run first to fill memTypeIndex.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so it must not be mapped yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte of the mapped range to catch bad mappings.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time. Mapping is reference-counted: must return the same pointer.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        // Distinct allocations must map to non-overlapping CPU ranges.
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times - once per preceding map. Only then is it truly unmapped.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2. An explicit map of a persistently mapped allocation returns the same pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2. Persistent mapping must survive the explicit unmap.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5108
// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
// Three variants: 0 = plain allocation, 1 = with CAN_MAKE_OTHER_LOST flag,
// 2 = allocation from a custom pool. In each case the allocation must end up
// mapped exactly when the chosen memory type is HOST_VISIBLE.
static void TestDeviceLocalMapped()
{
    VkResult res;

    for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
    {
        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
        bufCreateInfo.size = 4096;

        VmaPool pool = VK_NULL_HANDLE;
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
        if(testIndex == 2)
        {
            // Allocate from a custom pool placed in a matching memory type.
            VmaPoolCreateInfo poolCreateInfo = {};
            res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
            TEST(res == VK_SUCCESS);
            res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
            TEST(res == VK_SUCCESS);
            allocCreateInfo.pool = pool;
        }
        else if(testIndex == 1)
        {
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
        }

        VkBuffer buf = VK_NULL_HANDLE;
        VmaAllocation alloc = VK_NULL_HANDLE;
        VmaAllocationInfo allocInfo = {};
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS && alloc);

        // MAPPED_BIT can only take effect on HOST_VISIBLE memory; DEVICE_LOCAL
        // memory may or may not also be HOST_VISIBLE, so check conditionally.
        VkMemoryPropertyFlags memTypeFlags = 0;
        vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
        const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
        TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);

        vmaDestroyBuffer(g_hAllocator, buf, alloc);
        vmaDestroyPool(g_hAllocator, pool);
    }
}
5153
// Stress-tests vmaMapMemory/vmaUnmapMemory from 16 concurrent threads, each creating
// 64 buffers with a randomly chosen mapping mode (never mapped, mapped briefly,
// mapped until destruction, mapped twice, or persistently mapped). Runs the whole
// scenario three times: default allocations, custom pool, dedicated allocations.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Shared across threads: each thread stores the memory type of its allocations here.
    // NOTE(review): volatile is not a synchronization primitive; concurrent writes are
    // presumably benign because all threads store the same value - confirm if modified.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    // NOTE(review): `enum TEST` shares its name with the TEST() assertion macro;
    // the function-like macro only expands before '(', so this compiles.
    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Relies on the TEST_NORMAL iteration having filled memTypeIndex.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Capture everything by copy except memTypeIndex, which is shared by reference.
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Deterministic per-thread RNG so runs are reproducible.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Second map of the same allocation must return the same pointer.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            // MAP_TWO_TIMES still holds one outstanding map.
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Randomly yield or sleep to shuffle thread interleaving.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Tear down in reverse order, releasing any map still outstanding.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5316
// Writes the CSV column-header row for main-test results to `file`.
// Must stay in sync with the row layout produced by WriteMainTestResult().
static void WriteMainTestResultHeader(FILE* file)
{
    // No format specifiers needed - emit the header as one literal.
    fputs(
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n",
        file);
}
5333
5334static void WriteMainTestResult(
5335 FILE* file,
5336 const char* codeDescription,
5337 const char* testDescription,
5338 const Config& config, const Result& result)
5339{
5340 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5341 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5342 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5343 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5344 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5345 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5346 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5347
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005348 std::string currTime;
5349 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005350
5351 fprintf(file,
5352 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01005353 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
5354 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005355 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02005356 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01005357 totalTimeSeconds * 1e6f,
5358 allocationTimeMinSeconds * 1e6f,
5359 allocationTimeAvgSeconds * 1e6f,
5360 allocationTimeMaxSeconds * 1e6f,
5361 deallocationTimeMinSeconds * 1e6f,
5362 deallocationTimeAvgSeconds * 1e6f,
5363 deallocationTimeMaxSeconds * 1e6f,
5364 result.TotalMemoryAllocated,
5365 result.FreeRangeSizeAvg,
5366 result.FreeRangeSizeMax);
5367}
5368
// Writes the CSV column-header row for pool-test results to `file`.
// Must stay in sync with the row layout produced by WritePoolTestResult().
static void WritePoolTestResultHeader(FILE* file)
{
    // No format specifiers needed - emit the header as one literal.
    fputs(
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n",
        file);
}
5386
// Appends one CSV data row for a finished pool test to `file`.
// Columns match WritePoolTestResultHeader(): code/test descriptions, timestamp,
// a single combined config column, durations in microseconds, then counts and
// byte sizes. %I64u / %llu are 64-bit unsigned format specifiers (MSVC/Windows).
static void WritePoolTestResult(
    FILE* file,
    const char* codeDescription,
    const char* testDescription,
    const PoolTestConfig& config,
    const PoolTestResult& result)
{
    // Convert measured durations from seconds to values suitable for the "(us)" columns.
    float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
    float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
    float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
    float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
    float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
    float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
    float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);

    std::string currTime;
    CurrentTimeToStr(currTime);

    fprintf(file,
        "%s,%s,%s,"
        "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
        "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
        // General
        codeDescription,
        testDescription,
        currTime.c_str(),
        // Config
        config.ThreadCount,
        (unsigned long long)config.PoolSize,
        config.FrameCount,
        config.TotalItemCount,
        config.UsedItemCountMin,
        config.UsedItemCountMax,
        config.ItemsToMakeUnusedPercent,
        // Results (multiplied by 1e6f to print microseconds)
        totalTimeSeconds * 1e6f,
        allocationTimeMinSeconds * 1e6f,
        allocationTimeAvgSeconds * 1e6f,
        allocationTimeMaxSeconds * 1e6f,
        deallocationTimeMinSeconds * 1e6f,
        deallocationTimeAvgSeconds * 1e6f,
        deallocationTimeMaxSeconds * 1e6f,
        result.LostAllocationCount,
        result.LostAllocationTotalSize,
        result.FailedAllocationCount,
        result.FailedAllocationTotalSize);
}
5434
// Runs MainTest() once with a single hand-tuned configuration and appends the
// result to `file` as one CSV row (columns per WriteMainTestResultHeader).
// Commented-out lines are alternative configurations kept for manual experiments.
static void PerformCustomMainTest(FILE* file)
{
    Config config{};
    config.RandSeed = 65735476;
    //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;
    config.ThreadCount = 16;
    config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
    config.AllocationStrategy = 0;

    // Buffers
    //config.AllocationSizes.push_back({4, 16, 1024});
    config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB

    // Images
    //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
    //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});

    // Pre-allocate 5% of the byte limit, then perform 1024 random further operations.
    config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
    config.AdditionalOperationCount = 1024;

    Result result{};
    VkResult res = MainTest(result, config);
    TEST(res == VK_SUCCESS);
    WriteMainTestResult(file, "Foo", "CustomTest", config, result);
}
5463
5464static void PerformCustomPoolTest(FILE* file)
5465{
5466 PoolTestConfig config;
5467 config.PoolSize = 100 * 1024 * 1024;
5468 config.RandSeed = 2345764;
5469 config.ThreadCount = 1;
5470 config.FrameCount = 200;
5471 config.ItemsToMakeUnusedPercent = 2;
5472
5473 AllocationSize allocSize = {};
5474 allocSize.BufferSizeMin = 1024;
5475 allocSize.BufferSizeMax = 1024 * 1024;
5476 allocSize.Probability = 1;
5477 config.AllocationSizes.push_back(allocSize);
5478
5479 allocSize.BufferSizeMin = 0;
5480 allocSize.BufferSizeMax = 0;
5481 allocSize.ImageSizeMin = 128;
5482 allocSize.ImageSizeMax = 1024;
5483 allocSize.Probability = 1;
5484 config.AllocationSizes.push_back(allocSize);
5485
5486 config.PoolSize = config.CalcAvgResourceSize() * 200;
5487 config.UsedItemCountMax = 160;
5488 config.TotalItemCount = config.UsedItemCountMax * 10;
5489 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5490
5491 g_MemoryAliasingWarningEnabled = false;
5492 PoolTestResult result = {};
5493 TestPool_Benchmark(result, config);
5494 g_MemoryAliasingWarningEnabled = true;
5495
5496 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
5497}
5498
Adam Sawickib8333fb2018-03-13 16:15:53 +01005499static void PerformMainTests(FILE* file)
5500{
5501 uint32_t repeatCount = 1;
5502 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
5503
5504 Config config{};
5505 config.RandSeed = 65735476;
5506 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
5507 config.FreeOrder = FREE_ORDER::FORWARD;
5508
5509 size_t threadCountCount = 1;
5510 switch(ConfigType)
5511 {
5512 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
5513 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
5514 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
5515 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
5516 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
5517 default: assert(0);
5518 }
Adam Sawicki0667e332018-08-24 17:26:44 +02005519
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02005520 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02005521
Adam Sawickib8333fb2018-03-13 16:15:53 +01005522 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
5523 {
5524 std::string desc1;
5525
5526 switch(threadCountIndex)
5527 {
5528 case 0:
5529 desc1 += "1_thread";
5530 config.ThreadCount = 1;
5531 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5532 break;
5533 case 1:
5534 desc1 += "16_threads+0%_common";
5535 config.ThreadCount = 16;
5536 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5537 break;
5538 case 2:
5539 desc1 += "16_threads+50%_common";
5540 config.ThreadCount = 16;
5541 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
5542 break;
5543 case 3:
5544 desc1 += "16_threads+100%_common";
5545 config.ThreadCount = 16;
5546 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
5547 break;
5548 case 4:
5549 desc1 += "2_threads+0%_common";
5550 config.ThreadCount = 2;
5551 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
5552 break;
5553 case 5:
5554 desc1 += "2_threads+50%_common";
5555 config.ThreadCount = 2;
5556 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
5557 break;
5558 case 6:
5559 desc1 += "2_threads+100%_common";
5560 config.ThreadCount = 2;
5561 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
5562 break;
5563 default:
5564 assert(0);
5565 }
5566
5567 // 0 = buffers, 1 = images, 2 = buffers and images
5568 size_t buffersVsImagesCount = 2;
5569 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
5570 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
5571 {
5572 std::string desc2 = desc1;
5573 switch(buffersVsImagesIndex)
5574 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005575 case 0: desc2 += ",Buffers"; break;
5576 case 1: desc2 += ",Images"; break;
5577 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005578 default: assert(0);
5579 }
5580
5581 // 0 = small, 1 = large, 2 = small and large
5582 size_t smallVsLargeCount = 2;
5583 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
5584 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
5585 {
5586 std::string desc3 = desc2;
5587 switch(smallVsLargeIndex)
5588 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005589 case 0: desc3 += ",Small"; break;
5590 case 1: desc3 += ",Large"; break;
5591 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005592 default: assert(0);
5593 }
5594
5595 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5596 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
5597 else
5598 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
5599
5600 // 0 = varying sizes min...max, 1 = set of constant sizes
5601 size_t constantSizesCount = 1;
5602 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
5603 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
5604 {
5605 std::string desc4 = desc3;
5606 switch(constantSizesIndex)
5607 {
5608 case 0: desc4 += " Varying_sizes"; break;
5609 case 1: desc4 += " Constant_sizes"; break;
5610 default: assert(0);
5611 }
5612
5613 config.AllocationSizes.clear();
5614 // Buffers present
5615 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5616 {
5617 // Small
5618 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5619 {
5620 // Varying size
5621 if(constantSizesIndex == 0)
5622 config.AllocationSizes.push_back({4, 16, 1024});
5623 // Constant sizes
5624 else
5625 {
5626 config.AllocationSizes.push_back({1, 16, 16});
5627 config.AllocationSizes.push_back({1, 64, 64});
5628 config.AllocationSizes.push_back({1, 256, 256});
5629 config.AllocationSizes.push_back({1, 1024, 1024});
5630 }
5631 }
5632 // Large
5633 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5634 {
5635 // Varying size
5636 if(constantSizesIndex == 0)
5637 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5638 // Constant sizes
5639 else
5640 {
5641 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5642 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5643 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5644 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5645 }
5646 }
5647 }
5648 // Images present
5649 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5650 {
5651 // Small
5652 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5653 {
5654 // Varying size
5655 if(constantSizesIndex == 0)
5656 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5657 // Constant sizes
5658 else
5659 {
5660 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5661 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5662 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5663 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5664 }
5665 }
5666 // Large
5667 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5668 {
5669 // Varying size
5670 if(constantSizesIndex == 0)
5671 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5672 // Constant sizes
5673 else
5674 {
5675 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
5676 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
5677 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
5678 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
5679 }
5680 }
5681 }
5682
5683 // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
5684 size_t beginBytesToAllocateCount = 1;
5685 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
5686 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
5687 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
5688 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
5689 {
5690 std::string desc5 = desc4;
5691
5692 switch(beginBytesToAllocateIndex)
5693 {
5694 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005695 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005696 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
5697 config.AdditionalOperationCount = 0;
5698 break;
5699 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005700 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005701 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
5702 config.AdditionalOperationCount = 1024;
5703 break;
5704 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005705 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005706 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
5707 config.AdditionalOperationCount = 1024;
5708 break;
5709 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005710 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01005711 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
5712 config.AdditionalOperationCount = 1024;
5713 break;
5714 default:
5715 assert(0);
5716 }
5717
Adam Sawicki0667e332018-08-24 17:26:44 +02005718 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01005719 {
Adam Sawicki0667e332018-08-24 17:26:44 +02005720 std::string desc6 = desc5;
5721 switch(strategyIndex)
5722 {
5723 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005724 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005725 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
5726 break;
5727 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005728 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005729 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
5730 break;
5731 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02005732 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02005733 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
5734 break;
5735 default:
5736 assert(0);
5737 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01005738
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005739 desc6 += ',';
5740 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02005741
5742 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02005743
5744 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
5745 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005746 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02005747
5748 Result result{};
5749 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005750 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02005751 if(file)
5752 {
5753 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
5754 }
Adam Sawicki0667e332018-08-24 17:26:44 +02005755 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01005756 }
5757 }
5758 }
5759 }
5760 }
5761 }
5762}
5763
// Benchmark of VMA custom pools: iterates over the Cartesian product of
// thread count x resource kind (buffers/images) x size class x
// constant-vs-varying sizes x pool subscription level, runs
// TestPool_Benchmark for each combination, and writes one CSV row per run
// to `file`. The number of variants per axis scales with global ConfigType.
static void PerformPoolTests(FILE* file)
{
    // The pool size is derived below so that it holds roughly this many
    // resources of average size.
    const size_t AVG_RESOURCES_PER_POOL = 300;

    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    // Shared config object, mutated in place by every nested loop below.
    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    // How many thread-count variants to test (1, 16, 2 — in that order).
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        // desc1..desc5 accumulate a human-readable test description, one
        // fragment per loop level; desc5.c_str() becomes the CSV label.
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                // NOTE(review): this PoolSize value appears to be
                // unconditionally overwritten below
                // (config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL)
                // before any benchmark runs, making it a dead store — unless
                // CalcAvgResourceSize() reads PoolSize. TODO confirm.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the allocation-size table for this combination.
                    // AllocationSizes entries: {probability, bufMin, bufMax}
                    // for buffers, {probability, 0, 0, imgMin, imgMax} for
                    // images — presumably; verify against PoolTestConfig.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Size the pool to hold ~AVG_RESOURCES_PER_POOL resources
                    // of average size (overrides the PoolSize set above).
                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        // Subscription level = how full the pool is kept,
                        // as a percentage of its nominal capacity.
                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            // Aliasing warnings are expected during this
                            // benchmark, so silence them for its duration.
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}
5989
Adam Sawickia83793a2018-09-03 13:40:42 +02005990static void BasicTestBuddyAllocator()
5991{
5992 wprintf(L"Basic test buddy allocator\n");
5993
5994 RandomNumberGenerator rand{76543};
5995
5996 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5997 sampleBufCreateInfo.size = 1024; // Whatever.
5998 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5999
6000 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6001 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6002
6003 VmaPoolCreateInfo poolCreateInfo = {};
6004 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006005 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006006
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02006007 // Deliberately adding 1023 to test usable size smaller than memory block size.
6008 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02006009 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02006010 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02006011
6012 VmaPool pool = nullptr;
6013 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006014 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006015
6016 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
6017
6018 VmaAllocationCreateInfo allocCreateInfo = {};
6019 allocCreateInfo.pool = pool;
6020
6021 std::vector<BufferInfo> bufInfo;
6022 BufferInfo newBufInfo;
6023 VmaAllocationInfo allocInfo;
6024
6025 bufCreateInfo.size = 1024 * 256;
6026 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6027 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006028 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006029 bufInfo.push_back(newBufInfo);
6030
6031 bufCreateInfo.size = 1024 * 512;
6032 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6033 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006034 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006035 bufInfo.push_back(newBufInfo);
6036
6037 bufCreateInfo.size = 1024 * 128;
6038 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6039 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006040 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02006041 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02006042
6043 // Test very small allocation, smaller than minimum node size.
6044 bufCreateInfo.size = 1;
6045 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6046 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006047 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02006048 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02006049
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006050 // Test some small allocation with alignment requirement.
6051 {
6052 VkMemoryRequirements memReq;
6053 memReq.alignment = 256;
6054 memReq.memoryTypeBits = UINT32_MAX;
6055 memReq.size = 32;
6056
6057 newBufInfo.Buffer = VK_NULL_HANDLE;
6058 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
6059 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006060 TEST(res == VK_SUCCESS);
6061 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02006062 bufInfo.push_back(newBufInfo);
6063 }
6064
6065 //SaveAllocatorStatsToFile(L"TEST.json");
6066
Adam Sawicki21017c62018-09-07 15:26:59 +02006067 VmaPoolStats stats = {};
6068 vmaGetPoolStats(g_hAllocator, pool, &stats);
6069 int DBG = 0; // Set breakpoint here to inspect `stats`.
6070
Adam Sawicki80927152018-09-07 17:27:23 +02006071 // Allocate enough new buffers to surely fall into second block.
6072 for(uint32_t i = 0; i < 32; ++i)
6073 {
6074 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
6075 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
6076 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02006077 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02006078 bufInfo.push_back(newBufInfo);
6079 }
6080
6081 SaveAllocatorStatsToFile(L"BuddyTest01.json");
6082
Adam Sawickia83793a2018-09-03 13:40:42 +02006083 // Destroy the buffers in random order.
6084 while(!bufInfo.empty())
6085 {
6086 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
6087 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
6088 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
6089 bufInfo.erase(bufInfo.begin() + indexToDestroy);
6090 }
6091
6092 vmaDestroyPool(g_hAllocator, pool);
6093}
6094
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006095static void BasicTestAllocatePages()
6096{
6097 wprintf(L"Basic test allocate pages\n");
6098
6099 RandomNumberGenerator rand{765461};
6100
6101 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
6102 sampleBufCreateInfo.size = 1024; // Whatever.
6103 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
6104
6105 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
6106 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6107
6108 VmaPoolCreateInfo poolCreateInfo = {};
6109 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02006110 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006111
6112 // 1 block of 1 MB.
6113 poolCreateInfo.blockSize = 1024 * 1024;
6114 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
6115
6116 // Create pool.
6117 VmaPool pool = nullptr;
6118 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02006119 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006120
6121 // Make 100 allocations of 4 KB - they should fit into the pool.
6122 VkMemoryRequirements memReq;
6123 memReq.memoryTypeBits = UINT32_MAX;
6124 memReq.alignment = 4 * 1024;
6125 memReq.size = 4 * 1024;
6126
6127 VmaAllocationCreateInfo allocCreateInfo = {};
6128 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
6129 allocCreateInfo.pool = pool;
6130
6131 constexpr uint32_t allocCount = 100;
6132
6133 std::vector<VmaAllocation> alloc{allocCount};
6134 std::vector<VmaAllocationInfo> allocInfo{allocCount};
6135 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006136 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006137 for(uint32_t i = 0; i < allocCount; ++i)
6138 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006139 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006140 allocInfo[i].pMappedData != nullptr &&
6141 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
6142 allocInfo[i].memoryType == allocInfo[0].memoryType);
6143 }
6144
6145 // Free the allocations.
6146 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6147 std::fill(alloc.begin(), alloc.end(), nullptr);
6148 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6149
6150 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
6151 // Also test optional allocationInfo = null.
6152 memReq.size = 100 * 1024;
6153 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02006154 TEST(res != VK_SUCCESS);
6155 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006156
6157 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
6158 memReq.size = 4 * 1024;
6159 memReq.alignment = 128 * 1024;
6160 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006161 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006162
6163 // Make 100 dedicated allocations of 4 KB.
6164 memReq.alignment = 4 * 1024;
6165 memReq.size = 4 * 1024;
6166
6167 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
6168 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
6169 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
6170 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02006171 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006172 for(uint32_t i = 0; i < allocCount; ++i)
6173 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006174 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006175 allocInfo[i].pMappedData != nullptr &&
6176 allocInfo[i].memoryType == allocInfo[0].memoryType &&
6177 allocInfo[i].offset == 0);
6178 if(i > 0)
6179 {
Adam Sawickia7d77692018-10-03 16:15:27 +02006180 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006181 }
6182 }
6183
6184 // Free the allocations.
6185 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
6186 std::fill(alloc.begin(), alloc.end(), nullptr);
6187 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
6188
6189 vmaDestroyPool(g_hAllocator, pool);
6190}
6191
Adam Sawickif2975342018-10-16 13:49:02 +02006192// Test the testing environment.
6193static void TestGpuData()
6194{
6195 RandomNumberGenerator rand = { 53434 };
6196
6197 std::vector<AllocInfo> allocInfo;
6198
6199 for(size_t i = 0; i < 100; ++i)
6200 {
6201 AllocInfo info = {};
6202
6203 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
6204 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
6205 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
6206 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6207 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
6208
6209 VmaAllocationCreateInfo allocCreateInfo = {};
6210 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6211
6212 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
6213 TEST(res == VK_SUCCESS);
6214
6215 info.m_StartValue = rand.Generate();
6216
6217 allocInfo.push_back(std::move(info));
6218 }
6219
6220 UploadGpuData(allocInfo.data(), allocInfo.size());
6221
6222 ValidateGpuData(allocInfo.data(), allocInfo.size());
6223
6224 DestroyAllAllocations(allocInfo);
6225}
6226
Adam Sawickib8333fb2018-03-13 16:15:53 +01006227void Test()
6228{
6229 wprintf(L"TESTING:\n");
6230
Adam Sawicki48b8a332019-11-02 15:24:33 +01006231 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02006232 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01006233 ////////////////////////////////////////////////////////////////////////////////
6234 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02006235 return;
6236 }
6237
Adam Sawickib8333fb2018-03-13 16:15:53 +01006238 // # Simple tests
6239
6240 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02006241 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02006242#if VMA_DEBUG_MARGIN
6243 TestDebugMargin();
6244#else
6245 TestPool_SameSize();
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01006246 TestPool_MinBlockCount();
Adam Sawicki212a4a62018-06-14 15:44:45 +02006247 TestHeapSizeLimit();
6248#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02006249#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
6250 TestAllocationsInitialization();
6251#endif
Adam Sawickiefa88c42019-11-18 16:33:56 +01006252 TestMemoryUsage();
Adam Sawicki50882502020-02-07 16:51:31 +01006253 TestDeviceCoherentMemory();
Adam Sawicki40ffe982019-10-11 15:56:02 +02006254 TestBudget();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006255 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02006256 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006257 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02006258 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02006259 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02006260 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006261
Adam Sawicki4338f662018-09-07 14:12:37 +02006262 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006263 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02006264
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006265 {
6266 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02006267 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006268 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02006269 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006270 fclose(file);
6271 }
6272
Adam Sawickib8333fb2018-03-13 16:15:53 +01006273 TestDefragmentationSimple();
6274 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01006275 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01006276 TestDefragmentationGpu();
Adam Sawickia52012d2019-12-23 15:28:51 +01006277 TestDefragmentationIncrementalBasic();
6278 TestDefragmentationIncrementalComplex();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006279
6280 // # Detailed tests
6281 FILE* file;
6282 fopen_s(&file, "Results.csv", "w");
6283 assert(file != NULL);
6284
6285 WriteMainTestResultHeader(file);
6286 PerformMainTests(file);
6287 //PerformCustomMainTest(file);
6288
6289 WritePoolTestResultHeader(file);
6290 PerformPoolTests(file);
6291 //PerformCustomPoolTest(file);
6292
6293 fclose(file);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01006294
Adam Sawickib8333fb2018-03-13 16:15:53 +01006295 wprintf(L"Done.\n");
6296}
6297
Adam Sawickif1a793c2018-03-13 15:42:22 +01006298#endif // #ifdef _WIN32