//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
{
    const VkPhysicalDeviceMemoryProperties* props;
    vmaGetMemoryProperties(g_hAllocator, &props);
    return props->memoryTypes[memoryTypeIndex].heapIndex;
}

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

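// RAII helper: measures the time between construction and destruction of the
// object and accumulates it into the referenced min/sum/max durations.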
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

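// Runs the main allocation stress test described by `config`: spawns
// config.ThreadCount threads, each of which creates random buffers and images
// (with usage, size, and strategy drawn from the configured probabilities),
// performs additional random allocations and frees, and finally releases
// everything in the requested FreeOrder. Timing and memory statistics are
// written to `outResult`.
//
// Illustrative only (this particular configuration is an assumption, not part
// of the test suite): a minimal single-threaded run could be set up like this:
//
//   Config config = {};
//   config.RandSeed = 123;
//   config.ThreadCount = 1;
//   config.BeginBytesToAllocate = 16ull * 1024 * 1024;
//   config.MaxBytesToAllocate = 64ull * 1024 * 1024;
//   config.AdditionalOperationCount = 1024;
//   config.MemUsageProbability[0] = 1; // Only VMA_MEMORY_USAGE_GPU_ONLY.
//   config.FreeOrder = FREE_ORDER::FORWARD;
//   config.AllocationSizes.push_back({100, 16, 1024, 0, 0}); // Small buffers only.
//
//   Result result;
//   MainTest(result, config);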
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for all threads to reach their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate.
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources.
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

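// Describes one test resource: a buffer or an image together with its
// VmaAllocation and the pseudo-random start value used to fill the resource
// and later validate its contents.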
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

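// Collection of host-visible, persistently mapped staging buffers that are
// reused between transfers. AcquireBuffer() hands out an existing unused
// buffer when possible and creates a new one otherwise, up to MAX_TOTAL_SIZE
// in total.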
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

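// Fills the buffers described by allocInfo[0..allocInfoCount) with their
// expected data pattern by recording copies from staging buffers into the
// temporary command buffer. Flushes and restarts the command buffer whenever
// the staging collection runs out of space. Images are not supported yet.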
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

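// Counterpart of UploadGpuData(): copies each buffer back into staging memory
// and verifies that its contents still match the sequential pattern derived
// from m_StartValue. Images are not supported yet.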
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

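// Creates a standalone allocation with randomized size (currently always a
// buffer; the image path is kept but disabled) and fills its memory with a
// sequential pattern starting at m_StartValue.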
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
        TEST(res == VK_SUCCESS);
    }
}

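// Convenience wrapper over vmaDefragment(): defragments the given allocations
// and recreates and rebinds the Vulkan buffer or image of every allocation
// that was moved.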
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of the allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }
    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
    */
    if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

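// Runs the same defragmentation twice: once addressed by pool (pPools) and
// once by an explicit list of all its allocations, then checks that both
// approaches report identical statistics.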
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

Adam Sawickib8333fb2018-03-13 16:15:53 +01001529void TestDefragmentationFull()
1530{
1531 std::vector<AllocInfo> allocations;
1532
1533 // Create initial allocations.
1534 for(size_t i = 0; i < 400; ++i)
1535 {
1536 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001537 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001538 allocations.push_back(allocation);
1539 }
1540
1541 // Delete random allocations
1542 const size_t allocationsToDeletePercent = 80;
1543 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1544 for(size_t i = 0; i < allocationsToDelete; ++i)
1545 {
1546 size_t index = (size_t)rand() % allocations.size();
1547 DestroyAllocation(allocations[index]);
1548 allocations.erase(allocations.begin() + index);
1549 }
1550
1551 for(size_t i = 0; i < allocations.size(); ++i)
1552 ValidateAllocationData(allocations[i]);
1553
Adam Sawicki0667e332018-08-24 17:26:44 +02001554 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001555
1556 {
1557 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1558 for(size_t i = 0; i < allocations.size(); ++i)
1559 vmaAllocations[i] = allocations[i].m_Allocation;
1560
1561 const size_t nonMovablePercent = 0;
1562 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1563 for(size_t i = 0; i < nonMovableCount; ++i)
1564 {
1565 size_t index = (size_t)rand() % vmaAllocations.size();
1566 vmaAllocations.erase(vmaAllocations.begin() + index);
1567 }
1568
1569 const uint32_t defragCount = 1;
1570 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1571 {
1572 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1573
1574 VmaDefragmentationInfo defragmentationInfo;
1575 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1576 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1577
1578 wprintf(L"Defragmentation #%u\n", defragIndex);
1579
1580 time_point begTime = std::chrono::high_resolution_clock::now();
1581
1582 VmaDefragmentationStats stats;
1583 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001584 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001585
1586 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1587
1588 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1589 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1590 wprintf(L"Time: %.2f s\n", defragmentDuration);
1591
1592 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1593 {
1594 if(allocationsChanged[i])
1595 {
1596 RecreateAllocationResource(allocations[i]);
1597 }
1598 }
1599
1600 for(size_t i = 0; i < allocations.size(); ++i)
1601 ValidateAllocationData(allocations[i]);
1602
Adam Sawicki0667e332018-08-24 17:26:44 +02001603 //wchar_t fileName[MAX_PATH];
1604 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1605 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001606 }
1607 }
1608
1609 // Destroy all remaining allocations.
1610 DestroyAllAllocations(allocations);
1611}
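
// Editorial sketch (not part of the original test suite): the CPU
// defragmentation pattern exercised by TestDefragmentationFull(), reduced to
// its essentials. It assumes the g_hAllocator global and TEST macro used
// throughout this file; the parameter names are illustrative.
static void SketchCpuDefragmentation(std::vector<VmaAllocation>& allocs)
{
    std::vector<VkBool32> changed(allocs.size());

    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;    // No limit.
    defragInfo.maxAllocationsToMove = UINT32_MAX; // No limit.

    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(g_hAllocator, allocs.data(), allocs.size(),
        changed.data(), &defragInfo, &stats);
    TEST(res >= 0);

    // vmaDefragment() only moves memory contents. Buffers bound to allocations
    // reported in 'changed' still reference the old memory and must be
    // destroyed, recreated and rebound (see RecreateAllocationResource above).
}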
1612
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001613static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001614{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001615 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001616 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001617
1618 std::vector<AllocInfo> allocations;
1619
1620 // Create enough allocations to be sure to fill 3 new blocks of 256 MB each.
Adam Sawickic6ede152018-11-16 17:04:14 +01001621 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1622 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001623 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1625 const size_t percentToLeave = 30;
1626 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001627 RandomNumberGenerator rand = { 234522 };
1628
1629 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001630
1631 VmaAllocationCreateInfo allocCreateInfo = {};
1632 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001633 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001634
1635 // Create all intended buffers.
1636 for(size_t i = 0; i < bufCount; ++i)
1637 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001638 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1639
1640 if(rand.Generate() % 100 < percentNonMovable)
1641 {
1642 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1643 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1644 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1645 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1646 }
1647 else
1648 {
1649 // Different usage just to see a different color in the output from VmaDumpVis.
1650 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1651 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1652 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1653 // And in JSON dump.
1654 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1655 }
1656
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001657 AllocInfo alloc;
1658 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1659 alloc.m_StartValue = rand.Generate();
1660 allocations.push_back(alloc);
1661 }
1662
1663 // Destroy some percentage of them.
1664 {
1665 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1666 for(size_t i = 0; i < buffersToDestroy; ++i)
1667 {
1668 const size_t index = rand.Generate() % allocations.size();
1669 allocations[index].Destroy();
1670 allocations.erase(allocations.begin() + index);
1671 }
1672 }
1673
1674 // Fill them with meaningful data.
1675 UploadGpuData(allocations.data(), allocations.size());
1676
Adam Sawickic6ede152018-11-16 17:04:14 +01001677 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001678 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001679 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001680
1681 // Defragment using GPU only.
1682 {
1683 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001684
Adam Sawickic6ede152018-11-16 17:04:14 +01001685 std::vector<VmaAllocation> allocationPtrs;
1686 std::vector<VkBool32> allocationChanged;
1687 std::vector<size_t> allocationOriginalIndex;
1688
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001689 for(size_t i = 0; i < allocCount; ++i)
1690 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001691 VmaAllocationInfo allocInfo = {};
1692 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1693 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1694 {
1695 allocationPtrs.push_back(allocations[i].m_Allocation);
1696 allocationChanged.push_back(VK_FALSE);
1697 allocationOriginalIndex.push_back(i);
1698 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001699 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001700
1701 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001702
1703 BeginSingleTimeCommands();
1704
1705 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001706 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001707 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001708 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001709 defragInfo.pAllocationsChanged = allocationChanged.data();
1710 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001711 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1712 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1713
1714 VmaDefragmentationStats stats = {};
1715 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1716 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1717 TEST(res >= VK_SUCCESS);
1718
1719 EndSingleTimeCommands();
1720
1721 vmaDefragmentationEnd(g_hAllocator, ctx);
1722
Adam Sawickic6ede152018-11-16 17:04:14 +01001723 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001724 {
1725 if(allocationChanged[i])
1726 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001727 const size_t origAllocIndex = allocationOriginalIndex[i];
1728 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001729 }
1730 }
1731
Adam Sawicki4d844e22019-01-24 16:21:05 +01001732 // If corruption detection is enabled, GPU defragmentation may not work on
1733 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001734 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001735 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1736 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001737 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001738 }
1739
1740 ValidateGpuData(allocations.data(), allocations.size());
1741
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001742 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001743 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001744
1745 // Destroy all remaining buffers.
1746 for(size_t i = allocations.size(); i--; )
1747 {
1748 allocations[i].Destroy();
1749 }
Adam Sawicki05704002018-11-08 16:07:29 +01001750
1751 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001752}
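
// Editorial sketch (not part of the original test suite): the core
// vmaDefragmentationBegin/End plus command-buffer pattern used by
// TestDefragmentationGpu(), isolated. Assumes g_hAllocator,
// g_hTemporaryCommandBuffer, BeginSingleTimeCommands() and
// EndSingleTimeCommands() declared earlier in this file.
static void SketchGpuDefragmentation(VmaAllocation* allocs, uint32_t allocCount, VkBool32* changed)
{
    BeginSingleTimeCommands();

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.pAllocationsChanged = changed;
    defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
    defragInfo.commandBuffer = g_hTemporaryCommandBuffer; // Copy commands are recorded here.

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    TEST(vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx) >= VK_SUCCESS);

    EndSingleTimeCommands(); // Submits the copy commands and waits for completion.

    vmaDefragmentationEnd(g_hAllocator, ctx);

    // As with the CPU path, buffers whose allocations are marked in 'changed'
    // must be recreated and rebound to the new memory afterwards.
}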
1753
Adam Sawickib8333fb2018-03-13 16:15:53 +01001754static void TestUserData()
1755{
1756 VkResult res;
1757
1758 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1759 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1760 bufCreateInfo.size = 0x10000;
1761
1762 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1763 {
1764 // Opaque pointer
1765 {
1766
1767 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1768 void* pointerToSomething = &res;
1769
1770 VmaAllocationCreateInfo allocCreateInfo = {};
1771 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1772 allocCreateInfo.pUserData = numberAsPointer;
1773 if(testIndex == 1)
1774 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1775
1776 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1777 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001778 TEST(res == VK_SUCCESS);
1779 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001782 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001783
1784 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1785 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001786 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001787
1788 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1789 }
1790
1791 // String
1792 {
1793 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1794 const char* name2 = "2";
1795 const size_t name1Len = strlen(name1);
1796
1797 char* name1Buf = new char[name1Len + 1];
1798 strcpy_s(name1Buf, name1Len + 1, name1);
1799
1800 VmaAllocationCreateInfo allocCreateInfo = {};
1801 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1802 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1803 allocCreateInfo.pUserData = name1Buf;
1804 if(testIndex == 1)
1805 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1806
1807 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1808 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001809 TEST(res == VK_SUCCESS);
1810 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1811 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001812
1813 delete[] name1Buf;
1814
1815 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001816 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001817
1818 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1819 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001820 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001821
1822 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1823 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001824 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001825
1826 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1827 }
1828 }
1829}
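
// Editorial sketch (not part of the original test suite): a typical
// application-side use of pUserData - attaching a name that VMA copies
// internally thanks to VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, so the
// caller's string does not have to stay alive. Assumes g_hAllocator; the
// buffer parameters and names are illustrative.
static void SketchNamedAllocation(const VkBufferCreateInfo& bufCreateInfo)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = (void*)"MyMesh.VertexBuffer"; // Copied by VMA.

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr) == VK_SUCCESS);

    // The name appears in vmaBuildStatsString() output and can be replaced later:
    vmaSetAllocationUserData(g_hAllocator, alloc, (void*)"MyMesh.VertexBuffer.Renamed");

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}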
1830
Adam Sawicki370ab182018-11-08 16:31:00 +01001831static void TestInvalidAllocations()
1832{
1833 VkResult res;
1834
1835 VmaAllocationCreateInfo allocCreateInfo = {};
1836 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1837
1838 // Try to allocate 0 bytes.
1839 {
1840 VkMemoryRequirements memReq = {};
1841 memReq.size = 0; // !!!
1842 memReq.alignment = 4;
1843 memReq.memoryTypeBits = UINT32_MAX;
1844 VmaAllocation alloc = VK_NULL_HANDLE;
1845 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1846 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1847 }
1848
1849 // Try to create buffer with size = 0.
1850 {
1851 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1852 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1853 bufCreateInfo.size = 0; // !!!
1854 VkBuffer buf = VK_NULL_HANDLE;
1855 VmaAllocation alloc = VK_NULL_HANDLE;
1856 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1857 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1858 }
1859
1860 // Try to create image with one dimension = 0.
1861 {
1862 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1863 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1864 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1865 imageCreateInfo.extent.width = 128;
1866 imageCreateInfo.extent.height = 0; // !!!
1867 imageCreateInfo.extent.depth = 1;
1868 imageCreateInfo.mipLevels = 1;
1869 imageCreateInfo.arrayLayers = 1;
1870 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1871 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1872 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1873 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1874 VkImage image = VK_NULL_HANDLE;
1875 VmaAllocation alloc = VK_NULL_HANDLE;
1876 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1877 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1878 }
1879}
1880
Adam Sawickib8333fb2018-03-13 16:15:53 +01001881static void TestMemoryRequirements()
1882{
1883 VkResult res;
1884 VkBuffer buf;
1885 VmaAllocation alloc;
1886 VmaAllocationInfo allocInfo;
1887
1888 const VkPhysicalDeviceMemoryProperties* memProps;
1889 vmaGetMemoryProperties(g_hAllocator, &memProps);
1890
1891 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1892 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1893 bufInfo.size = 128;
1894
1895 VmaAllocationCreateInfo allocCreateInfo = {};
1896
1897 // No requirements.
1898 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001899 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001900 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1901
1902 // Usage.
1903 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1904 allocCreateInfo.requiredFlags = 0;
1905 allocCreateInfo.preferredFlags = 0;
1906 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1907
1908 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001909 TEST(res == VK_SUCCESS);
1910 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001911 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1912
1913 // Required flags, preferred flags.
1914 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1915 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1916 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1917 allocCreateInfo.memoryTypeBits = 0;
1918
1919 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001920 TEST(res == VK_SUCCESS);
1921 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1922 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001923 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1924
1925 // memoryTypeBits.
1926 const uint32_t memType = allocInfo.memoryType;
1927 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1928 allocCreateInfo.requiredFlags = 0;
1929 allocCreateInfo.preferredFlags = 0;
1930 allocCreateInfo.memoryTypeBits = 1u << memType;
1931
1932 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001933 TEST(res == VK_SUCCESS);
1934 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001935 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1936
1937}
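
// Editorial sketch (not part of the original test suite): the same controls
// exercised above (usage, requiredFlags, preferredFlags, memoryTypeBits) can
// also be used to query a suitable memory type up front, e.g. when creating a
// custom pool. Assumes g_hAllocator; the staging-buffer parameters are illustrative.
static uint32_t SketchFindMemoryTypeForStagingBuffer()
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 0x10000;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

    uint32_t memTypeIndex = UINT32_MAX;
    TEST(vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo,
        &allocCreateInfo, &memTypeIndex) == VK_SUCCESS);
    return memTypeIndex;
}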
1938
1939static void TestBasics()
1940{
1941 VkResult res;
1942
1943 TestMemoryRequirements();
1944
1945 // Lost allocation
1946 {
1947 VmaAllocation alloc = VK_NULL_HANDLE;
1948 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001949 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001950
1951 VmaAllocationInfo allocInfo;
1952 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001953 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1954 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001955
1956 vmaFreeMemory(g_hAllocator, alloc);
1957 }
1958
1959 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1960 {
1961 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1962 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1963 bufCreateInfo.size = 128;
1964
1965 VmaAllocationCreateInfo allocCreateInfo = {};
1966 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1967 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1968
1969 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1970 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001972
1973 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1974
1975 // Same with DEDICATED_MEMORY.
1976 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1977
1978 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001979 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980
1981 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1982 }
1983
1984 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001985
1986 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001987}
1988
1989void TestHeapSizeLimit()
1990{
1991 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1992 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1993
1994 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1995 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1996 {
1997 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1998 }
1999
2000 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2001 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2002 allocatorCreateInfo.device = g_hDevice;
2003 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2004
2005 VmaAllocator hAllocator;
2006 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002007 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002008
2009 struct Item
2010 {
2011 VkBuffer hBuf;
2012 VmaAllocation hAlloc;
2013 };
2014 std::vector<Item> items;
2015
2016 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2017 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2018
2019 // 1. Allocate two dedicated-memory buffers, each half the size of BLOCK_SIZE.
2020 VmaAllocationInfo ownAllocInfo;
2021 {
2022 VmaAllocationCreateInfo allocCreateInfo = {};
2023 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2024 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2025
2026 bufCreateInfo.size = BLOCK_SIZE / 2;
2027
2028 for(size_t i = 0; i < 2; ++i)
2029 {
2030 Item item;
2031 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002032 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002033 items.push_back(item);
2034 }
2035 }
2036
2037 // Create pool to make sure allocations must be out of this memory type.
2038 VmaPoolCreateInfo poolCreateInfo = {};
2039 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2040 poolCreateInfo.blockSize = BLOCK_SIZE;
2041
2042 VmaPool hPool;
2043 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002044 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002045
2046 // 2. Allocate normal buffers from all the remaining memory.
2047 {
2048 VmaAllocationCreateInfo allocCreateInfo = {};
2049 allocCreateInfo.pool = hPool;
2050
2051 bufCreateInfo.size = BLOCK_SIZE / 2;
2052
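// With HEAP_SIZE_LIMIT = 1 GB and BLOCK_SIZE = 128 MB, 8 blocks fit under the limit.
// Step 1 already consumed one block's worth (2 x 64 MB of dedicated memory),
// so 7 blocks remain, each holding two half-block buffers: (8 - 1) * 2 = 14 buffers.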
2053 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2054 for(size_t i = 0; i < bufCount; ++i)
2055 {
2056 Item item;
2057 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002058 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002059 items.push_back(item);
2060 }
2061 }
2062
2063 // 3. Allocation of one more (even small) buffer should fail.
2064 {
2065 VmaAllocationCreateInfo allocCreateInfo = {};
2066 allocCreateInfo.pool = hPool;
2067
2068 bufCreateInfo.size = 128;
2069
2070 VkBuffer hBuf;
2071 VmaAllocation hAlloc;
2072 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002073 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002074 }
2075
2076 // Destroy everything.
2077 for(size_t i = items.size(); i--; )
2078 {
2079 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2080 }
2081
2082 vmaDestroyPool(hAllocator, hPool);
2083
2084 vmaDestroyAllocator(hAllocator);
2085}
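
// Editorial sketch (not part of the original test suite): in a real
// application the limit is typically applied only to the DEVICE_LOCAL heap(s),
// leaving the others at VK_WHOLE_SIZE (no limit). Assumes g_hPhysicalDevice and
// g_hDevice from this file; the 256 MB figure is arbitrary.
static VmaAllocator SketchCreateAllocatorWithDeviceLocalHeapLimit()
{
    VkPhysicalDeviceMemoryProperties memProps = {};
    vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        const bool deviceLocal = i < memProps.memoryHeapCount &&
            (memProps.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0;
        heapSizeLimit[i] = deviceLocal ? 256ull * 1024 * 1024 : VK_WHOLE_SIZE;
    }

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit; // Values are copied by vmaCreateAllocator.

    VmaAllocator hAllocator = VK_NULL_HANDLE;
    TEST(vmaCreateAllocator(&allocatorCreateInfo, &hAllocator) == VK_SUCCESS);
    return hAllocator;
}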
2086
Adam Sawicki212a4a62018-06-14 15:44:45 +02002087#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002088static void TestDebugMargin()
2089{
2090 if(VMA_DEBUG_MARGIN == 0)
2091 {
2092 return;
2093 }
2094
2095 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002096 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002097
2098 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002099 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002100
2101 // Create a few buffers of different sizes.
2102 const size_t BUF_COUNT = 10;
2103 BufferInfo buffers[BUF_COUNT];
2104 VmaAllocationInfo allocInfo[BUF_COUNT];
2105 for(size_t i = 0; i < BUF_COUNT; ++i)
2106 {
2107 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002108 // Last one will be mapped.
2109 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002110
2111 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002112 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002113 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002114 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002115
2116 if(i == BUF_COUNT - 1)
2117 {
2118 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002119 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002120 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2121 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2122 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002123 }
2124
2125 // Check if their offsets preserve margin between them.
2126 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2127 {
2128 if(lhs.deviceMemory != rhs.deviceMemory)
2129 {
2130 return lhs.deviceMemory < rhs.deviceMemory;
2131 }
2132 return lhs.offset < rhs.offset;
2133 });
2134 for(size_t i = 1; i < BUF_COUNT; ++i)
2135 {
2136 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2137 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002138 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002139 }
2140 }
2141
Adam Sawicki212a4a62018-06-14 15:44:45 +02002142 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002143 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002144
Adam Sawicki73b16652018-06-11 16:39:25 +02002145 // Destroy all buffers.
2146 for(size_t i = BUF_COUNT; i--; )
2147 {
2148 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2149 }
2150}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002151#endif
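
// Editorial note (not part of the original test suite): TestDebugMargin() only
// compiles and runs when the library is built with a non-zero margin. A typical
// configuration, placed where VMA_IMPLEMENTATION is defined (before including
// vk_mem_alloc.h), might look like this:
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1 // Writes and validates magic values in the margins.
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"
//
// With corruption detection enabled, vmaCheckCorruption(allocator, UINT32_MAX)
// validates the margins of all allocations in host-visible memory, as done above.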
Adam Sawicki73b16652018-06-11 16:39:25 +02002152
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002153static void TestLinearAllocator()
2154{
2155 wprintf(L"Test linear allocator\n");
2156
2157 RandomNumberGenerator rand{645332};
2158
2159 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2160 sampleBufCreateInfo.size = 1024; // Whatever.
2161 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2162
2163 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2164 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2165
2166 VmaPoolCreateInfo poolCreateInfo = {};
2167 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002168 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002169
Adam Sawickiee082772018-06-20 17:45:49 +02002170 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002171 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2172 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2173
2174 VmaPool pool = nullptr;
2175 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002176 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002177
2178 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2179
2180 VmaAllocationCreateInfo allocCreateInfo = {};
2181 allocCreateInfo.pool = pool;
2182
2183 constexpr size_t maxBufCount = 100;
2184 std::vector<BufferInfo> bufInfo;
2185
2186 constexpr VkDeviceSize bufSizeMin = 16;
2187 constexpr VkDeviceSize bufSizeMax = 1024;
2188 VmaAllocationInfo allocInfo;
2189 VkDeviceSize prevOffset = 0;
2190
2191 // Test one-time free.
2192 for(size_t i = 0; i < 2; ++i)
2193 {
2194 // Allocate a number of buffers of varying sizes that surely fit into this block.
2195 VkDeviceSize bufSumSize = 0;
2196 for(size_t i = 0; i < maxBufCount; ++i)
2197 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002198 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002199 BufferInfo newBufInfo;
2200 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2201 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002202 TEST(res == VK_SUCCESS);
2203 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002204 bufInfo.push_back(newBufInfo);
2205 prevOffset = allocInfo.offset;
2206 bufSumSize += bufCreateInfo.size;
2207 }
2208
2209 // Validate pool stats.
2210 VmaPoolStats stats;
2211 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002212 TEST(stats.size == poolCreateInfo.blockSize);
2213 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2214 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002215
2216 // Destroy the buffers in random order.
2217 while(!bufInfo.empty())
2218 {
2219 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2220 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2221 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2222 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2223 }
2224 }
2225
2226 // Test stack.
2227 {
2228 // Allocate a number of buffers of varying sizes that surely fit into this block.
2229 for(size_t i = 0; i < maxBufCount; ++i)
2230 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002231 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002232 BufferInfo newBufInfo;
2233 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2234 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002235 TEST(res == VK_SUCCESS);
2236 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002237 bufInfo.push_back(newBufInfo);
2238 prevOffset = allocInfo.offset;
2239 }
2240
2241 // Destroy a few buffers from the top of the stack.
2242 for(size_t i = 0; i < maxBufCount / 5; ++i)
2243 {
2244 const BufferInfo& currBufInfo = bufInfo.back();
2245 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2246 bufInfo.pop_back();
2247 }
2248
2249 // Create some more
2250 for(size_t i = 0; i < maxBufCount / 5; ++i)
2251 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002252 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002253 BufferInfo newBufInfo;
2254 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2255 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002256 TEST(res == VK_SUCCESS);
2257 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002258 bufInfo.push_back(newBufInfo);
2259 prevOffset = allocInfo.offset;
2260 }
2261
2262 // Destroy the buffers in reverse order.
2263 while(!bufInfo.empty())
2264 {
2265 const BufferInfo& currBufInfo = bufInfo.back();
2266 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2267 bufInfo.pop_back();
2268 }
2269 }
2270
Adam Sawickiee082772018-06-20 17:45:49 +02002271 // Test ring buffer.
2272 {
2273 // Allocate a number of buffers that surely fit into this block.
2274 bufCreateInfo.size = bufSizeMax;
2275 for(size_t i = 0; i < maxBufCount; ++i)
2276 {
2277 BufferInfo newBufInfo;
2278 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2279 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002280 TEST(res == VK_SUCCESS);
2281 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002282 bufInfo.push_back(newBufInfo);
2283 prevOffset = allocInfo.offset;
2284 }
2285
2286 // Free and allocate new buffers so many times that we are sure to wrap around at least once.
2287 const size_t buffersPerIter = maxBufCount / 10 - 1;
2288 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2289 for(size_t iter = 0; iter < iterCount; ++iter)
2290 {
2291 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2292 {
2293 const BufferInfo& currBufInfo = bufInfo.front();
2294 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2295 bufInfo.erase(bufInfo.begin());
2296 }
2297 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2298 {
2299 BufferInfo newBufInfo;
2300 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2301 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002302 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002303 bufInfo.push_back(newBufInfo);
2304 }
2305 }
2306
2307 // Allocate buffers until we reach out-of-memory.
2308 uint32_t debugIndex = 0;
2309 while(res == VK_SUCCESS)
2310 {
2311 BufferInfo newBufInfo;
2312 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2313 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2314 if(res == VK_SUCCESS)
2315 {
2316 bufInfo.push_back(newBufInfo);
2317 }
2318 else
2319 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002320 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002321 }
2322 ++debugIndex;
2323 }
2324
2325 // Destroy the buffers in random order.
2326 while(!bufInfo.empty())
2327 {
2328 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2329 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2330 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2331 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2332 }
2333 }
2334
Adam Sawicki680b2252018-08-22 14:47:32 +02002335 // Test double stack.
2336 {
2337 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating bottom/top.
2338 VkDeviceSize prevOffsetLower = 0;
2339 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2340 for(size_t i = 0; i < maxBufCount; ++i)
2341 {
2342 const bool upperAddress = (i % 2) != 0;
2343 if(upperAddress)
2344 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2345 else
2346 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002347 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002348 BufferInfo newBufInfo;
2349 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2350 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002351 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002352 if(upperAddress)
2353 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002354 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002355 prevOffsetUpper = allocInfo.offset;
2356 }
2357 else
2358 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002359 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002360 prevOffsetLower = allocInfo.offset;
2361 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002362 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002363 bufInfo.push_back(newBufInfo);
2364 }
2365
2366 // Destroy a few buffers from the top of the stack.
2367 for(size_t i = 0; i < maxBufCount / 5; ++i)
2368 {
2369 const BufferInfo& currBufInfo = bufInfo.back();
2370 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2371 bufInfo.pop_back();
2372 }
2373
2374 // Create some more
2375 for(size_t i = 0; i < maxBufCount / 5; ++i)
2376 {
2377 const bool upperAddress = (i % 2) != 0;
2378 if(upperAddress)
2379 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2380 else
2381 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002382 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002383 BufferInfo newBufInfo;
2384 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2385 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002386 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002387 bufInfo.push_back(newBufInfo);
2388 }
2389
2390 // Destroy the buffers in reverse order.
2391 while(!bufInfo.empty())
2392 {
2393 const BufferInfo& currBufInfo = bufInfo.back();
2394 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2395 bufInfo.pop_back();
2396 }
2397
2398 // Create buffers on both sides until we reach out of memory.
2399 prevOffsetLower = 0;
2400 prevOffsetUpper = poolCreateInfo.blockSize;
2401 res = VK_SUCCESS;
2402 for(size_t i = 0; res == VK_SUCCESS; ++i)
2403 {
2404 const bool upperAddress = (i % 2) != 0;
2405 if(upperAddress)
2406 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2407 else
2408 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002409 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002410 BufferInfo newBufInfo;
2411 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2412 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2413 if(res == VK_SUCCESS)
2414 {
2415 if(upperAddress)
2416 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002417 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002418 prevOffsetUpper = allocInfo.offset;
2419 }
2420 else
2421 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002422 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002423 prevOffsetLower = allocInfo.offset;
2424 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002425 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002426 bufInfo.push_back(newBufInfo);
2427 }
2428 }
2429
2430 // Destroy the buffers in random order.
2431 while(!bufInfo.empty())
2432 {
2433 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2434 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2435 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2436 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2437 }
2438
2439 // Create buffers on upper side only, constant size, until we reach out of memory.
2440 prevOffsetUpper = poolCreateInfo.blockSize;
2441 res = VK_SUCCESS;
2442 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2443 bufCreateInfo.size = bufSizeMax;
2444 for(size_t i = 0; res == VK_SUCCESS; ++i)
2445 {
2446 BufferInfo newBufInfo;
2447 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2448 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2449 if(res == VK_SUCCESS)
2450 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002451 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002452 prevOffsetUpper = allocInfo.offset;
2453 bufInfo.push_back(newBufInfo);
2454 }
2455 }
2456
2457 // Destroy the buffers in reverse order.
2458 while(!bufInfo.empty())
2459 {
2460 const BufferInfo& currBufInfo = bufInfo.back();
2461 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2462 bufInfo.pop_back();
2463 }
2464 }
2465
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002466 // Test ring buffer with lost allocations.
2467 {
2468 // Allocate a number of buffers until the pool is full.
2469 // Notice the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
2470 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2471 res = VK_SUCCESS;
2472 for(size_t i = 0; res == VK_SUCCESS; ++i)
2473 {
2474 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2475
Adam Sawickifd366b62019-01-24 15:26:43 +01002476 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002477
2478 BufferInfo newBufInfo;
2479 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2480 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2481 if(res == VK_SUCCESS)
2482 bufInfo.push_back(newBufInfo);
2483 }
2484
2485 // Free first half of it.
2486 {
2487 const size_t buffersToDelete = bufInfo.size() / 2;
2488 for(size_t i = 0; i < buffersToDelete; ++i)
2489 {
2490 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2491 }
2492 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2493 }
2494
2495 // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002496 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002497 res = VK_SUCCESS;
2498 for(size_t i = 0; res == VK_SUCCESS; ++i)
2499 {
2500 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2501
Adam Sawickifd366b62019-01-24 15:26:43 +01002502 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002503
2504 BufferInfo newBufInfo;
2505 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2506 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2507 if(res == VK_SUCCESS)
2508 bufInfo.push_back(newBufInfo);
2509 }
2510
2511 VkDeviceSize firstNewOffset;
2512 {
2513 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2514
2515 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2516 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2517 bufCreateInfo.size = bufSizeMax;
2518
2519 BufferInfo newBufInfo;
2520 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2521 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002522 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002523 bufInfo.push_back(newBufInfo);
2524 firstNewOffset = allocInfo.offset;
2525
2526 // Make sure at least one buffer from the beginning became lost.
2527 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002528 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002529 }
2530
Adam Sawickifd366b62019-01-24 15:26:43 +01002531#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002532 // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with this.
2533 size_t newCount = 1;
2534 for(;;)
2535 {
2536 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2537
Adam Sawickifd366b62019-01-24 15:26:43 +01002538 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002539
2540 BufferInfo newBufInfo;
2541 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2542 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002543
Adam Sawickib8d34d52018-10-03 17:41:20 +02002544 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002545 bufInfo.push_back(newBufInfo);
2546 ++newCount;
2547 if(allocInfo.offset < firstNewOffset)
2548 break;
2549 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002550#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002551
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002552 // Delete buffers that are lost.
2553 for(size_t i = bufInfo.size(); i--; )
2554 {
2555 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2556 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2557 {
2558 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2559 bufInfo.erase(bufInfo.begin() + i);
2560 }
2561 }
2562
2563 // Test vmaMakePoolAllocationsLost
2564 {
2565 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2566
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002567 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002568 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002569 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002570
2571 size_t realLostAllocCount = 0;
2572 for(size_t i = 0; i < bufInfo.size(); ++i)
2573 {
2574 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2575 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2576 ++realLostAllocCount;
2577 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002578 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002579 }
2580
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002581 // Destroy all the buffers in forward order.
2582 for(size_t i = 0; i < bufInfo.size(); ++i)
2583 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2584 bufInfo.clear();
2585 }
2586
Adam Sawicki70a683e2018-08-24 15:36:32 +02002587 vmaDestroyPool(g_hAllocator, pool);
2588}
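
// Editorial sketch (not part of the original test suite): how an application
// might use the "double stack" behavior exercised above - long-lived data at
// the bottom of the block, transient data allocated downwards from the top.
// Assumes g_hAllocator and a pool created with
// VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, as at the start of TestLinearAllocator().
static void SketchDoubleStack(VmaPool pool, const VkBufferCreateInfo& bufCreateInfo)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    // Bottom of the stack: default (lower) address.
    BufferInfo longLived;
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &longLived.Buffer, &longLived.Allocation, nullptr) == VK_SUCCESS);

    // Top of the stack: allocated downwards from the end of the block.
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
    BufferInfo transient;
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &transient.Buffer, &transient.Allocation, nullptr) == VK_SUCCESS);

    // Each side of the double stack must be freed in LIFO order with respect to itself.
    vmaDestroyBuffer(g_hAllocator, transient.Buffer, transient.Allocation);
    vmaDestroyBuffer(g_hAllocator, longLived.Buffer, longLived.Allocation);
}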
Adam Sawickif799c4f2018-08-23 10:40:30 +02002589
Adam Sawicki70a683e2018-08-24 15:36:32 +02002590static void TestLinearAllocatorMultiBlock()
2591{
2592 wprintf(L"Test linear allocator multi block\n");
2593
2594 RandomNumberGenerator rand{345673};
2595
2596 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2597 sampleBufCreateInfo.size = 1024 * 1024;
2598 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2599
2600 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2601 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2602
2603 VmaPoolCreateInfo poolCreateInfo = {};
2604 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2605 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002606 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002607
2608 VmaPool pool = nullptr;
2609 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002610 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002611
2612 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2613
2614 VmaAllocationCreateInfo allocCreateInfo = {};
2615 allocCreateInfo.pool = pool;
2616
2617 std::vector<BufferInfo> bufInfo;
2618 VmaAllocationInfo allocInfo;
2619
2620 // Test one-time free.
2621 {
2622 // Allocate buffers until we move to a second block.
2623 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2624 for(uint32_t i = 0; ; ++i)
2625 {
2626 BufferInfo newBufInfo;
2627 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2628 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002629 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002630 bufInfo.push_back(newBufInfo);
2631 if(lastMem && allocInfo.deviceMemory != lastMem)
2632 {
2633 break;
2634 }
2635 lastMem = allocInfo.deviceMemory;
2636 }
2637
Adam Sawickib8d34d52018-10-03 17:41:20 +02002638 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002639
2640 // Make sure that the pool now has two blocks.
2641 VmaPoolStats poolStats = {};
2642 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002643 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002644
2645 // Destroy all the buffers in random order.
2646 while(!bufInfo.empty())
2647 {
2648 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2649 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2650 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2651 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2652 }
2653
2654 // Make sure that the pool now has at most one block.
2655 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002656 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002657 }
2658
2659 // Test stack.
2660 {
2661 // Allocate buffers until we move to a second block.
2662 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2663 for(uint32_t i = 0; ; ++i)
2664 {
2665 BufferInfo newBufInfo;
2666 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2667 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002668 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002669 bufInfo.push_back(newBufInfo);
2670 if(lastMem && allocInfo.deviceMemory != lastMem)
2671 {
2672 break;
2673 }
2674 lastMem = allocInfo.deviceMemory;
2675 }
2676
Adam Sawickib8d34d52018-10-03 17:41:20 +02002677 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002678
2679 // Add a few more buffers.
2680 for(uint32_t i = 0; i < 5; ++i)
2681 {
2682 BufferInfo newBufInfo;
2683 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2684 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002685 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002686 bufInfo.push_back(newBufInfo);
2687 }
2688
2689 // Make sure that the pool now has two blocks.
2690 VmaPoolStats poolStats = {};
2691 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002692 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002693
2694 // Delete half of buffers, LIFO.
2695 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2696 {
2697 const BufferInfo& currBufInfo = bufInfo.back();
2698 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2699 bufInfo.pop_back();
2700 }
2701
2702 // Add one more buffer.
2703 BufferInfo newBufInfo;
2704 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2705 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002706 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002707 bufInfo.push_back(newBufInfo);
2708
2709 // Make sure that the pool now has one block.
2710 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002711 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002712
2713 // Delete all the remaining buffers, LIFO.
2714 while(!bufInfo.empty())
2715 {
2716 const BufferInfo& currBufInfo = bufInfo.back();
2717 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2718 bufInfo.pop_back();
2719 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002720 }
2721
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002722 vmaDestroyPool(g_hAllocator, pool);
2723}
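
// Editorial sketch (not part of the original test suite): the pool
// configuration this test relies on - a linear-algorithm pool with default
// block size and block counts, so VMA may create a second block when the first
// fills up and free empty blocks again, which the test observes through
// VmaPoolStats::blockCount. Assumes g_hAllocator and a valid memTypeIndex.
static VmaPool SketchCreateGrowableLinearPool(uint32_t memTypeIndex)
{
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    // blockSize, minBlockCount and maxBlockCount are left at 0, i.e. defaults:
    // VMA picks the block size and the pool may grow and shrink as needed.

    VmaPool pool = VK_NULL_HANDLE;
    TEST(vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS);
    return pool;
}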
2724
Adam Sawickifd11d752018-08-22 15:02:10 +02002725static void ManuallyTestLinearAllocator()
2726{
2727 VmaStats origStats;
2728 vmaCalculateStats(g_hAllocator, &origStats);
2729
2730 wprintf(L"Manually test linear allocator\n");
2731
2732 RandomNumberGenerator rand{645332};
2733
2734 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2735 sampleBufCreateInfo.size = 1024; // Whatever.
2736 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2737
2738 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2739 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2740
2741 VmaPoolCreateInfo poolCreateInfo = {};
2742 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002743 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002744
2745 poolCreateInfo.blockSize = 10 * 1024;
2746 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2747 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2748
2749 VmaPool pool = nullptr;
2750 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002751 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002752
2753 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2754
2755 VmaAllocationCreateInfo allocCreateInfo = {};
2756 allocCreateInfo.pool = pool;
2757
2758 std::vector<BufferInfo> bufInfo;
2759 VmaAllocationInfo allocInfo;
2760 BufferInfo newBufInfo;
2761
2762 // Test double stack.
2763 {
2764 /*
2765 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2766 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2767
2768 Totally:
2769 1 block allocated
2770 10240 Vulkan bytes
2771 6 new allocations
2772 2256 bytes in allocations
2773 */
2774
2775 bufCreateInfo.size = 32;
2776 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2777 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002778 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002779 bufInfo.push_back(newBufInfo);
2780
2781 bufCreateInfo.size = 1024;
2782 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2783 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002784 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002785 bufInfo.push_back(newBufInfo);
2786
2787 bufCreateInfo.size = 32;
2788 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2789 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002790 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002791 bufInfo.push_back(newBufInfo);
2792
2793 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2794
2795 bufCreateInfo.size = 128;
2796 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2797 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002798 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002799 bufInfo.push_back(newBufInfo);
2800
2801 bufCreateInfo.size = 1024;
2802 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2803 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002804 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002805 bufInfo.push_back(newBufInfo);
2806
2807 bufCreateInfo.size = 16;
2808 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2809 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002810 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002811 bufInfo.push_back(newBufInfo);
2812
2813 VmaStats currStats;
2814 vmaCalculateStats(g_hAllocator, &currStats);
2815 VmaPoolStats poolStats;
2816 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2817
2818 char* statsStr = nullptr;
2819 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2820
2821 // PUT BREAKPOINT HERE TO CHECK.
2822 // Inspect: currStats versus origStats, poolStats, statsStr.
2823 int I = 0;
2824
2825 vmaFreeStatsString(g_hAllocator, statsStr);
2826
2827 // Destroy the buffers in reverse order.
2828 while(!bufInfo.empty())
2829 {
2830 const BufferInfo& currBufInfo = bufInfo.back();
2831 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2832 bufInfo.pop_back();
2833 }
2834 }
2835
2836 vmaDestroyPool(g_hAllocator, pool);
2837}
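
// Editorial sketch (not part of the original test suite): the statistics
// queries used above for manual inspection, gathered in one place. The JSON
// produced by vmaBuildStatsString() is the format consumed by the VmaDumpVis
// tool mentioned elsewhere in this file. Assumes g_hAllocator.
static void SketchDumpStats(VmaPool pool)
{
    VmaStats stats = {};
    vmaCalculateStats(g_hAllocator, &stats); // Aggregated numbers per memory type, heap and total.

    VmaPoolStats poolStats = {};
    vmaGetPoolStats(g_hAllocator, pool, &poolStats); // Numbers for one custom pool.

    char* statsStr = nullptr;
    vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE); // VK_TRUE = detailed map of allocations.
    // ... write statsStr to a log or file here ...
    vmaFreeStatsString(g_hAllocator, statsStr);

    (void)stats;
    (void)poolStats;
}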
2838
Adam Sawicki80927152018-09-07 17:27:23 +02002839static void BenchmarkAlgorithmsCase(FILE* file,
2840 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002841 bool empty,
2842 VmaAllocationCreateFlags allocStrategy,
2843 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002844{
2845 RandomNumberGenerator rand{16223};
2846
2847 const VkDeviceSize bufSizeMin = 32;
2848 const VkDeviceSize bufSizeMax = 1024;
2849 const size_t maxBufCapacity = 10000;
2850 const uint32_t iterationCount = 10;
2851
2852 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2853 sampleBufCreateInfo.size = bufSizeMax;
2854 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2855
2856 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2857 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2858
2859 VmaPoolCreateInfo poolCreateInfo = {};
2860 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002861 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002862
2863 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002864 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002865 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2866
2867 VmaPool pool = nullptr;
2868 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002869 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002870
2871 // Buffer created just to get memory requirements. Never bound to any memory.
2872 VkBuffer dummyBuffer = VK_NULL_HANDLE;
Adam Sawicki1f84f622019-07-02 13:40:01 +02002873 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002874 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002875
2876 VkMemoryRequirements memReq = {};
2877 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2878
Adam Sawicki1f84f622019-07-02 13:40:01 +02002879 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawicki0a607132018-08-24 11:18:41 +02002880
2881 VmaAllocationCreateInfo allocCreateInfo = {};
2882 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002883 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002884
2885 VmaAllocation alloc;
2886 std::vector<VmaAllocation> baseAllocations;
2887
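    // For the "not empty" case, pre-fragment the pool: fill roughly 1/3 of the block
    // with allocations of random size, then free a random half of them, so the
    // benchmark below runs against a partially fragmented block.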
2888 if(!empty)
2889 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002890 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002891 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002892 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002893 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002894 // This test intentionally allows sizes that are not aligned to 4 or 16 bytes.
2895 // This is theoretically allowed and already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002896 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2897 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002898 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002899 baseAllocations.push_back(alloc);
2900 totalSize += memReq.size;
2901 }
2902
2903 // Delete half of them, chosen at random.
2904 size_t allocsToDelete = baseAllocations.size() / 2;
2905 for(size_t i = 0; i < allocsToDelete; ++i)
2906 {
2907 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2908 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2909 baseAllocations.erase(baseAllocations.begin() + index);
2910 }
2911 }
2912
2913 // BENCHMARK
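    // Each iteration allocates allocCount blocks of random size and then frees them
    // in the requested order. Only the vmaAllocateMemory / vmaFreeMemory calls are
    // inside the timed regions; reversing or shuffling the vector is not measured.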
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002914 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002915 std::vector<VmaAllocation> testAllocations;
2916 testAllocations.reserve(allocCount);
2917 duration allocTotalDuration = duration::zero();
2918 duration freeTotalDuration = duration::zero();
2919 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2920 {
2921 // Allocations
2922 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2923 for(size_t i = 0; i < allocCount; ++i)
2924 {
2925 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2926 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002927 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002928 testAllocations.push_back(alloc);
2929 }
2930 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2931
2932 // Deallocations
2933 switch(freeOrder)
2934 {
2935 case FREE_ORDER::FORWARD:
2936 // Leave testAllocations unchanged.
2937 break;
2938 case FREE_ORDER::BACKWARD:
2939 std::reverse(testAllocations.begin(), testAllocations.end());
2940 break;
2941 case FREE_ORDER::RANDOM:
2942 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2943 break;
2944 default: assert(0);
2945 }
2946
2947 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2948 for(size_t i = 0; i < allocCount; ++i)
2949 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2950 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2951
2952 testAllocations.clear();
2953 }
2954
2955 // Delete baseAllocations
2956 while(!baseAllocations.empty())
2957 {
2958 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2959 baseAllocations.pop_back();
2960 }
2961
2962 vmaDestroyPool(g_hAllocator, pool);
2963
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002964 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2965 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2966
Adam Sawicki80927152018-09-07 17:27:23 +02002967 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2968 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002969 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002970 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002971 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002972 allocTotalSeconds,
2973 freeTotalSeconds);
2974
2975 if(file)
2976 {
2977 std::string currTime;
2978 CurrentTimeToStr(currTime);
2979
Adam Sawicki80927152018-09-07 17:27:23 +02002980 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002981 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002982 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002983 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002984 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002985 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2986 allocTotalSeconds,
2987 freeTotalSeconds);
2988 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002989}
2990
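// Runs BenchmarkAlgorithmsCase over combinations of free order, empty/non-empty pool,
// algorithm (default, buddy, linear) and, for the default algorithm only, the
// available allocation strategies. The number of combinations depends on ConfigType.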
Adam Sawicki80927152018-09-07 17:27:23 +02002991static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002992{
Adam Sawicki80927152018-09-07 17:27:23 +02002993 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002994
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002995 if(file)
2996 {
2997 fprintf(file,
2998 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002999 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003000 "Allocation time (s),Deallocation time (s)\n");
3001 }
3002
Adam Sawicki0a607132018-08-24 11:18:41 +02003003 uint32_t freeOrderCount = 1;
3004 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3005 freeOrderCount = 3;
3006 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3007 freeOrderCount = 2;
3008
3009 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003010 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003011
3012 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3013 {
3014 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3015 switch(freeOrderIndex)
3016 {
3017 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3018 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3019 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3020 default: assert(0);
3021 }
3022
3023 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3024 {
Adam Sawicki80927152018-09-07 17:27:23 +02003025 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003026 {
Adam Sawicki80927152018-09-07 17:27:23 +02003027 uint32_t algorithm = 0;
3028 switch(algorithmIndex)
3029 {
3030 case 0:
3031 break;
3032 case 1:
3033 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3034 break;
3035 case 2:
3036 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3037 break;
3038 default:
3039 assert(0);
3040 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003041
Adam Sawicki80927152018-09-07 17:27:23 +02003042 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003043 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3044 {
3045 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003046 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003047 {
3048 switch(allocStrategyIndex)
3049 {
3050 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3051 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3052 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3053 default: assert(0);
3054 }
3055 }
3056
Adam Sawicki80927152018-09-07 17:27:23 +02003057 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003058 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003059 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003060 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003061 strategy,
3062 freeOrder); // freeOrder
3063 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003064 }
3065 }
3066 }
3067}
3068
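// Tests a custom pool holding fixed-size buffers with CAN_BECOME_LOST allocations:
// fills the pool, checks that one more allocation fails, frees and reallocates
// randomly, verifies lost-allocation behavior across frame indices, defragmentation,
// pool statistics, vmaMakePoolAllocationsLost, and an allocation larger than the pool.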
Adam Sawickib8333fb2018-03-13 16:15:53 +01003069static void TestPool_SameSize()
3070{
3071 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3072 const size_t BUF_COUNT = 100;
3073 VkResult res;
3074
3075 RandomNumberGenerator rand{123};
3076
3077 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3078 bufferInfo.size = BUF_SIZE;
3079 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3080
3081 uint32_t memoryTypeBits = UINT32_MAX;
3082 {
3083 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003084 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003085 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003086
3087 VkMemoryRequirements memReq;
3088 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3089 memoryTypeBits = memReq.memoryTypeBits;
3090
Adam Sawicki1f84f622019-07-02 13:40:01 +02003091 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003092 }
3093
3094 VmaAllocationCreateInfo poolAllocInfo = {};
3095 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3096 uint32_t memTypeIndex;
3097 res = vmaFindMemoryTypeIndex(
3098 g_hAllocator,
3099 memoryTypeBits,
3100 &poolAllocInfo,
3101 &memTypeIndex);
3102
3103 VmaPoolCreateInfo poolCreateInfo = {};
3104 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3105 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3106 poolCreateInfo.minBlockCount = 1;
3107 poolCreateInfo.maxBlockCount = 4;
3108 poolCreateInfo.frameInUseCount = 0;
3109
3110 VmaPool pool;
3111 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003112 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003113
3114 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3115
3116 VmaAllocationCreateInfo allocInfo = {};
3117 allocInfo.pool = pool;
3118 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3119 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3120
3121 struct BufItem
3122 {
3123 VkBuffer Buf;
3124 VmaAllocation Alloc;
3125 };
3126 std::vector<BufItem> items;
3127
3128 // Fill entire pool.
3129 for(size_t i = 0; i < BUF_COUNT; ++i)
3130 {
3131 BufItem item;
3132 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003133 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003134 items.push_back(item);
3135 }
3136
3137 // Make sure that another allocation would fail.
3138 {
3139 BufItem item;
3140 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003141 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003142 }
3143
3144 // Validate that no buffer is lost. Also check that they are not mapped.
3145 for(size_t i = 0; i < items.size(); ++i)
3146 {
3147 VmaAllocationInfo allocInfo;
3148 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003149 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3150 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003151 }
3152
3153 // Free some percent of random items.
3154 {
3155 const size_t PERCENT_TO_FREE = 10;
3156 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3157 for(size_t i = 0; i < itemsToFree; ++i)
3158 {
3159 size_t index = (size_t)rand.Generate() % items.size();
3160 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3161 items.erase(items.begin() + index);
3162 }
3163 }
3164
3165 // Randomly allocate and free items.
3166 {
3167 const size_t OPERATION_COUNT = BUF_COUNT;
3168 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3169 {
3170 bool allocate = rand.Generate() % 2 != 0;
3171 if(allocate)
3172 {
3173 if(items.size() < BUF_COUNT)
3174 {
3175 BufItem item;
3176 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003177 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003178 items.push_back(item);
3179 }
3180 }
3181 else // Free
3182 {
3183 if(!items.empty())
3184 {
3185 size_t index = (size_t)rand.Generate() % items.size();
3186 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3187 items.erase(items.begin() + index);
3188 }
3189 }
3190 }
3191 }
3192
3193 // Allocate up to maximum.
3194 while(items.size() < BUF_COUNT)
3195 {
3196 BufItem item;
3197 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003198 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003199 items.push_back(item);
3200 }
3201
3202 // Validate that no buffer is lost.
3203 for(size_t i = 0; i < items.size(); ++i)
3204 {
3205 VmaAllocationInfo allocInfo;
3206 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003207 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003208 }
3209
3210 // Next frame.
3211 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3212
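    // The pool is already full and frameInUseCount == 0, so the allocations below are
    // expected to reclaim memory from the buffers last used in frame 1, making them lost.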
3213 // Allocate another BUF_COUNT buffers.
3214 for(size_t i = 0; i < BUF_COUNT; ++i)
3215 {
3216 BufItem item;
3217 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003218 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003219 items.push_back(item);
3220 }
3221
3222 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3223 for(size_t i = 0; i < BUF_COUNT; ++i)
3224 {
3225 VmaAllocationInfo allocInfo;
3226 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003227 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003228 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3229 }
3230 items.erase(items.begin(), items.begin() + BUF_COUNT);
3231
3232 // Validate that no buffer is lost.
3233 for(size_t i = 0; i < items.size(); ++i)
3234 {
3235 VmaAllocationInfo allocInfo;
3236 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003237 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003238 }
3239
3240 // Free one item.
3241 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3242 items.pop_back();
3243
3244 // Validate statistics.
3245 {
3246 VmaPoolStats poolStats = {};
3247 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003248 TEST(poolStats.allocationCount == items.size());
3249 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3250 TEST(poolStats.unusedRangeCount == 1);
3251 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3252 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003253 }
3254
3255 // Free all remaining items.
3256 for(size_t i = items.size(); i--; )
3257 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3258 items.clear();
3259
3260 // Allocate maximum items again.
3261 for(size_t i = 0; i < BUF_COUNT; ++i)
3262 {
3263 BufItem item;
3264 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003265 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003266 items.push_back(item);
3267 }
3268
3269 // Delete every other item.
3270 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3271 {
3272 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3273 items.erase(items.begin() + i);
3274 }
3275
3276 // Defragment!
3277 {
3278 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3279 for(size_t i = 0; i < items.size(); ++i)
3280 allocationsToDefragment[i] = items[i].Alloc;
3281
3282 VmaDefragmentationStats defragmentationStats;
3283 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003284 TEST(res == VK_SUCCESS);
3285 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003286 }
3287
3288 // Free all remaining items.
3289 for(size_t i = items.size(); i--; )
3290 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3291 items.clear();
3292
3293 ////////////////////////////////////////////////////////////////////////////////
3294 // Test for vmaMakePoolAllocationsLost
3295
3296 // Allocate 4 buffers on frame 10.
3297 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3298 for(size_t i = 0; i < 4; ++i)
3299 {
3300 BufItem item;
3301 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003302 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003303 items.push_back(item);
3304 }
3305
3306 // Touch first 2 of them on frame 11.
3307 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3308 for(size_t i = 0; i < 2; ++i)
3309 {
3310 VmaAllocationInfo allocInfo;
3311 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3312 }
3313
3314 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3315 size_t lostCount = 0xDEADC0DE;
3316 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003317 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003318
3319 // Make another call. Now 0 should be lost.
3320 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003321 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003322
3323 // Make another call, with null count. Should not crash.
3324 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3325
3326 // END: Free all remaining items.
3327 for(size_t i = items.size(); i--; )
3328 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3329
3330 items.clear();
3331
Adam Sawickid2924172018-06-11 12:48:46 +02003332 ////////////////////////////////////////////////////////////////////////////////
3333 // Test for allocation too large for pool
3334
3335 {
3336 VmaAllocationCreateInfo allocCreateInfo = {};
3337 allocCreateInfo.pool = pool;
3338
3339 VkMemoryRequirements memReq;
3340 memReq.memoryTypeBits = UINT32_MAX;
3341 memReq.alignment = 1;
3342 memReq.size = poolCreateInfo.blockSize + 4;
3343
3344 VmaAllocation alloc = nullptr;
3345 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003346 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003347 }
3348
Adam Sawickib8333fb2018-03-13 16:15:53 +01003349 vmaDestroyPool(g_hAllocator, pool);
3350}
3351
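// Returns true if all `size` bytes starting at pMemory are equal to `pattern`.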
Adam Sawickie44c6262018-06-15 14:30:39 +02003352static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3353{
3354 const uint8_t* pBytes = (const uint8_t*)pMemory;
3355 for(size_t i = 0; i < size; ++i)
3356 {
3357 if(pBytes[i] != pattern)
3358 {
3359 return false;
3360 }
3361 }
3362 return true;
3363}
3364
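// Note: this test expects the library to be compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS
// enabled, so that newly created allocations are filled with pattern 0xDC and freed
// allocations with 0xEF - these are the patterns validated below.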
3365static void TestAllocationsInitialization()
3366{
3367 VkResult res;
3368
3369 const size_t BUF_SIZE = 1024;
3370
3371 // Create pool.
3372
3373 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3374 bufInfo.size = BUF_SIZE;
3375 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3376
3377 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3378 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3379
3380 VmaPoolCreateInfo poolCreateInfo = {};
3381 poolCreateInfo.blockSize = BUF_SIZE * 10;
3382 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3383 poolCreateInfo.maxBlockCount = 1;
3384 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003385 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003386
3387 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3388 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003389 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003390
3391 // Create one persistently mapped buffer to keep memory of this block mapped,
3392 // so that pointer to mapped data will remain (more or less...) valid even
3393 // after destruction of other allocations.
3394
3395 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3396 VkBuffer firstBuf;
3397 VmaAllocation firstAlloc;
3398 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003399 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003400
3401 // Test buffers.
3402
3403 for(uint32_t i = 0; i < 2; ++i)
3404 {
3405 const bool persistentlyMapped = i == 0;
3406 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3407 VkBuffer buf;
3408 VmaAllocation alloc;
3409 VmaAllocationInfo allocInfo;
3410 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003411 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003412
3413 void* pMappedData;
3414 if(!persistentlyMapped)
3415 {
3416 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003417 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003418 }
3419 else
3420 {
3421 pMappedData = allocInfo.pMappedData;
3422 }
3423
3424 // Validate initialized content
3425 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003426 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003427
3428 if(!persistentlyMapped)
3429 {
3430 vmaUnmapMemory(g_hAllocator, alloc);
3431 }
3432
3433 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3434
3435 // Validate freed content
3436 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003437 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003438 }
3439
3440 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3441 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3442}
3443
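// Multithreaded benchmark of a single custom pool: each thread owns a set of buffer
// and image items sized according to config.AllocationSizes, frames are synchronized
// with Win32 events, and allocations marked CAN_BECOME_LOST may get lost between
// frames, in which case they are destroyed and recreated. Per-thread timings and
// counters are merged into outResult at the end.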
Adam Sawickib8333fb2018-03-13 16:15:53 +01003444static void TestPool_Benchmark(
3445 PoolTestResult& outResult,
3446 const PoolTestConfig& config)
3447{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003448 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003449
3450 RandomNumberGenerator mainRand{config.RandSeed};
3451
3452 uint32_t allocationSizeProbabilitySum = std::accumulate(
3453 config.AllocationSizes.begin(),
3454 config.AllocationSizes.end(),
3455 0u,
3456 [](uint32_t sum, const AllocationSize& allocSize) {
3457 return sum + allocSize.Probability;
3458 });
3459
3460 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3461 bufferInfo.size = 256; // Whatever.
3462 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3463
3464 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3465 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3466 imageInfo.extent.width = 256; // Whatever.
3467 imageInfo.extent.height = 256; // Whatever.
3468 imageInfo.extent.depth = 1;
3469 imageInfo.mipLevels = 1;
3470 imageInfo.arrayLayers = 1;
3471 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3472 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3473 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3474 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3475 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3476
3477 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3478 {
3479 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003480 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003481 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003482
3483 VkMemoryRequirements memReq;
3484 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3485 bufferMemoryTypeBits = memReq.memoryTypeBits;
3486
Adam Sawicki1f84f622019-07-02 13:40:01 +02003487 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003488 }
3489
3490 uint32_t imageMemoryTypeBits = UINT32_MAX;
3491 {
3492 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003493 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003494 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003495
3496 VkMemoryRequirements memReq;
3497 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3498 imageMemoryTypeBits = memReq.memoryTypeBits;
3499
Adam Sawicki1f84f622019-07-02 13:40:01 +02003500 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003501 }
3502
3503 uint32_t memoryTypeBits = 0;
3504 if(config.UsesBuffers() && config.UsesImages())
3505 {
3506 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3507 if(memoryTypeBits == 0)
3508 {
3509 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3510 return;
3511 }
3512 }
3513 else if(config.UsesBuffers())
3514 memoryTypeBits = bufferMemoryTypeBits;
3515 else if(config.UsesImages())
3516 memoryTypeBits = imageMemoryTypeBits;
3517 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003518 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003519
3520 VmaPoolCreateInfo poolCreateInfo = {};
3521 poolCreateInfo.memoryTypeIndex = 0;
3522 poolCreateInfo.minBlockCount = 1;
3523 poolCreateInfo.maxBlockCount = 1;
3524 poolCreateInfo.blockSize = config.PoolSize;
3525 poolCreateInfo.frameInUseCount = 1;
3526
3527 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3528 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3529 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3530
3531 VmaPool pool;
3532 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003533 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003534
3535 // Start time measurement - after creating pool and initializing data structures.
3536 time_point timeBeg = std::chrono::high_resolution_clock::now();
3537
3538 ////////////////////////////////////////////////////////////////////////////////
3539 // ThreadProc
3540 auto ThreadProc = [&](
3541 PoolTestThreadResult* outThreadResult,
3542 uint32_t randSeed,
3543 HANDLE frameStartEvent,
3544 HANDLE frameEndEvent) -> void
3545 {
3546 RandomNumberGenerator threadRand{randSeed};
3547
3548 outThreadResult->AllocationTimeMin = duration::max();
3549 outThreadResult->AllocationTimeSum = duration::zero();
3550 outThreadResult->AllocationTimeMax = duration::min();
3551 outThreadResult->DeallocationTimeMin = duration::max();
3552 outThreadResult->DeallocationTimeSum = duration::zero();
3553 outThreadResult->DeallocationTimeMax = duration::min();
3554 outThreadResult->AllocationCount = 0;
3555 outThreadResult->DeallocationCount = 0;
3556 outThreadResult->LostAllocationCount = 0;
3557 outThreadResult->LostAllocationTotalSize = 0;
3558 outThreadResult->FailedAllocationCount = 0;
3559 outThreadResult->FailedAllocationTotalSize = 0;
3560
3561 struct Item
3562 {
3563 VkDeviceSize BufferSize;
3564 VkExtent2D ImageSize;
3565 VkBuffer Buf;
3566 VkImage Image;
3567 VmaAllocation Alloc;
3568
3569 VkDeviceSize CalcSizeBytes() const
3570 {
3571 return BufferSize +
3572 ImageSize.width * ImageSize.height * 4;
3573 }
3574 };
3575 std::vector<Item> unusedItems, usedItems;
3576
3577 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3578
3579 // Create all items - all unused, not yet allocated.
3580 for(size_t i = 0; i < threadTotalItemCount; ++i)
3581 {
3582 Item item = {};
3583
3584 uint32_t allocSizeIndex = 0;
3585 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3586 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3587 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3588
3589 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3590 if(allocSize.BufferSizeMax > 0)
3591 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003592 TEST(allocSize.BufferSizeMin > 0);
3593 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003594 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3595 item.BufferSize = allocSize.BufferSizeMin;
3596 else
3597 {
3598 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3599 item.BufferSize = item.BufferSize / 16 * 16;
3600 }
3601 }
3602 else
3603 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003604 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003605 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3606 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3607 else
3608 {
3609 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3610 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3611 }
3612 }
3613
3614 unusedItems.push_back(item);
3615 }
3616
3617 auto Allocate = [&](Item& item) -> VkResult
3618 {
3619 VmaAllocationCreateInfo allocCreateInfo = {};
3620 allocCreateInfo.pool = pool;
3621 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3622 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3623
3624 if(item.BufferSize)
3625 {
3626 bufferInfo.size = item.BufferSize;
3627 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3628 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3629 }
3630 else
3631 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003632 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003633
3634 imageInfo.extent.width = item.ImageSize.width;
3635 imageInfo.extent.height = item.ImageSize.height;
3636 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3637 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3638 }
3639 };
3640
3641 ////////////////////////////////////////////////////////////////////////////////
3642 // Frames
3643 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3644 {
3645 WaitForSingleObject(frameStartEvent, INFINITE);
3646
3647 // Always make some percent of used bufs unused, to choose different used ones.
3648 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3649 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3650 {
3651 size_t index = threadRand.Generate() % usedItems.size();
3652 unusedItems.push_back(usedItems[index]);
3653 usedItems.erase(usedItems.begin() + index);
3654 }
3655
3656 // Determine which bufs we want to use in this frame.
3657 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3658 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003659 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003660 // Move some used to unused.
3661 while(usedBufCount < usedItems.size())
3662 {
3663 size_t index = threadRand.Generate() % usedItems.size();
3664 unusedItems.push_back(usedItems[index]);
3665 usedItems.erase(usedItems.begin() + index);
3666 }
3667 // Move some unused to used.
3668 while(usedBufCount > usedItems.size())
3669 {
3670 size_t index = threadRand.Generate() % unusedItems.size();
3671 usedItems.push_back(unusedItems[index]);
3672 unusedItems.erase(unusedItems.begin() + index);
3673 }
3674
3675 uint32_t touchExistingCount = 0;
3676 uint32_t touchLostCount = 0;
3677 uint32_t createSucceededCount = 0;
3678 uint32_t createFailedCount = 0;
3679
3680 // Touch all used items. Allocate those that are not yet created or have been lost.
3681 for(size_t i = 0; i < usedItems.size(); ++i)
3682 {
3683 Item& item = usedItems[i];
3684 // Not yet created.
3685 if(item.Alloc == VK_NULL_HANDLE)
3686 {
3687 res = Allocate(item);
3688 ++outThreadResult->AllocationCount;
3689 if(res != VK_SUCCESS)
3690 {
3691 item.Alloc = VK_NULL_HANDLE;
3692 item.Buf = VK_NULL_HANDLE;
3693 ++outThreadResult->FailedAllocationCount;
3694 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3695 ++createFailedCount;
3696 }
3697 else
3698 ++createSucceededCount;
3699 }
3700 else
3701 {
3702 // Touch.
3703 VmaAllocationInfo allocInfo;
3704 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3705 // Lost.
3706 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3707 {
3708 ++touchLostCount;
3709
3710 // Destroy.
3711 {
3712 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3713 if(item.Buf)
3714 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3715 else
3716 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3717 ++outThreadResult->DeallocationCount;
3718 }
3719 item.Alloc = VK_NULL_HANDLE;
3720 item.Buf = VK_NULL_HANDLE;
3721
3722 ++outThreadResult->LostAllocationCount;
3723 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3724
3725 // Recreate.
3726 res = Allocate(item);
3727 ++outThreadResult->AllocationCount;
3728 // Creation failed.
3729 if(res != VK_SUCCESS)
3730 {
3731 ++outThreadResult->FailedAllocationCount;
3732 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3733 ++createFailedCount;
3734 }
3735 else
3736 ++createSucceededCount;
3737 }
3738 else
3739 ++touchExistingCount;
3740 }
3741 }
3742
3743 /*
3744 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3745 randSeed, frameIndex,
3746 touchExistingCount, touchLostCount,
3747 createSucceededCount, createFailedCount);
3748 */
3749
3750 SetEvent(frameEndEvent);
3751 }
3752
3753 // Free all remaining items.
3754 for(size_t i = usedItems.size(); i--; )
3755 {
3756 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3757 if(usedItems[i].Buf)
3758 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3759 else
3760 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3761 ++outThreadResult->DeallocationCount;
3762 }
3763 for(size_t i = unusedItems.size(); i--; )
3764 {
3765 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3766 if(unusedItems[i].Buf)
3767 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3768 else
3769 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3770 ++outThreadResult->DeallocationCount;
3771 }
3772 };
3773
3774 // Launch threads.
3775 uint32_t threadRandSeed = mainRand.Generate();
3776 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3777 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3778 std::vector<std::thread> bkgThreads;
3779 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3780 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3781 {
3782 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3783 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3784 bkgThreads.emplace_back(std::bind(
3785 ThreadProc,
3786 &threadResults[threadIndex],
3787 threadRandSeed + threadIndex,
3788 frameStartEvents[threadIndex],
3789 frameEndEvents[threadIndex]));
3790 }
3791
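    // The main thread drives the frames: it bumps the allocator frame index, signals
    // every worker to start its frame, then waits until all of them have finished it.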
3792 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003793 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003794 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3795 {
3796 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3797 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3798 SetEvent(frameStartEvents[threadIndex]);
3799 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3800 }
3801
3802 // Wait for all threads to finish.
3803 for(size_t i = 0; i < bkgThreads.size(); ++i)
3804 {
3805 bkgThreads[i].join();
3806 CloseHandle(frameEndEvents[i]);
3807 CloseHandle(frameStartEvents[i]);
3808 }
3809 bkgThreads.clear();
3810
3811 // Finish time measurement - before destroying pool.
3812 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3813
3814 vmaDestroyPool(g_hAllocator, pool);
3815
3816 outResult.AllocationTimeMin = duration::max();
3817 outResult.AllocationTimeAvg = duration::zero();
3818 outResult.AllocationTimeMax = duration::min();
3819 outResult.DeallocationTimeMin = duration::max();
3820 outResult.DeallocationTimeAvg = duration::zero();
3821 outResult.DeallocationTimeMax = duration::min();
3822 outResult.LostAllocationCount = 0;
3823 outResult.LostAllocationTotalSize = 0;
3824 outResult.FailedAllocationCount = 0;
3825 outResult.FailedAllocationTotalSize = 0;
3826 size_t allocationCount = 0;
3827 size_t deallocationCount = 0;
3828 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3829 {
3830 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3831 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3832 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3833 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3834 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3835 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3836 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3837 allocationCount += threadResult.AllocationCount;
3838 deallocationCount += threadResult.DeallocationCount;
3839 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3840 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3841 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3842 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3843 }
3844 if(allocationCount)
3845 outResult.AllocationTimeAvg /= allocationCount;
3846 if(deallocationCount)
3847 outResult.DeallocationTimeAvg /= deallocationCount;
3848}
3849
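// Returns true if memory regions [ptr1, ptr1+size1) and [ptr2, ptr2+size2) overlap.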
3850static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3851{
3852 if(ptr1 < ptr2)
3853 return ptr1 + size1 > ptr2;
3854 else if(ptr2 < ptr1)
3855 return ptr2 + size2 > ptr1;
3856 else
3857 return true;
3858}
3859
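// Creates several large buffers (dedicated allocations in the first pass, default
// block allocations in the second) and checks that vmaGetBudget reports the expected
// growth and shrinkage of allocationBytes/blockBytes for the affected heap while the
// other heaps stay unchanged. The frame index is advanced at the start of each pass
// so the budget numbers are refreshed.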
Adam Sawicki40ffe982019-10-11 15:56:02 +02003860static void TestBudget()
3861{
3862 wprintf(L"Testing budget...\n");
3863
3864 uint32_t memTypeIndex = UINT32_MAX;
3865
Adam Sawicki353e3672019-11-02 14:12:05 +01003866 static const VkDeviceSize BUF_SIZE = 100ull * 1024 * 1024;
3867 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02003868
3869 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
3870 {
Adam Sawicki353e3672019-11-02 14:12:05 +01003871 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3872
3873 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
3874 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003875
3876 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3877 bufInfo.size = BUF_SIZE;
3878 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3879
3880 VmaAllocationCreateInfo allocCreateInfo = {};
3881 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3882 if(testIndex == 0)
3883 {
3884 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3885 }
3886
3887 // CREATE BUFFERS
3888 uint32_t heapIndex = 0;
3889 BufferInfo bufInfos[BUF_COUNT] = {};
3890 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
3891 {
3892 VmaAllocationInfo allocInfo;
3893 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3894 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
3895 TEST(res == VK_SUCCESS);
3896 if(bufIndex == 0)
3897 {
3898 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
3899 }
3900 else
3901 {
3902 // All buffers need to fall into the same heap.
3903 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
3904 }
3905 }
3906
Adam Sawicki353e3672019-11-02 14:12:05 +01003907 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
3908 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003909
3910 // DESTROY BUFFERS
3911 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
3912 {
3913 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
3914 }
3915
Adam Sawicki353e3672019-11-02 14:12:05 +01003916 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
3917 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003918
3919 // CHECK
3920 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
3921 {
Adam Sawicki353e3672019-11-02 14:12:05 +01003922 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003923 if(i == heapIndex)
3924 {
Adam Sawicki353e3672019-11-02 14:12:05 +01003925 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
3926 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
3927 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003928 }
3929 else
3930 {
Adam Sawicki353e3672019-11-02 14:12:05 +01003931 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes &&
3932 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
3933 TEST(budgetEnd[i].blockBytes == budgetBeg[i].blockBytes &&
3934 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02003935 }
3936 }
3937 }
Adam Sawicki353e3672019-11-02 14:12:05 +01003938
3939 // DELME - temporary debug code: creates buffers until allocation fails, then dumps stats and budget to inspect the behavior.
3940
3941 {
3942 std::vector<BufferInfo> buffers;
3943
3944 for(uint32_t i = 0; i < 160; ++i)
3945 {
3946 //vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3947
3948 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3949 bufCreateInfo.size = 50ull * 1024 * 1024;
3950 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3951
3952 VmaAllocationCreateInfo allocCreateInfo = {};
3953 //allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3954 allocCreateInfo.memoryTypeBits = 1;
3955 //allocCreateInfo.flags = VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT;
3956
3957 BufferInfo bufInfo = {};
3958 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
3959 if(res == VK_SUCCESS)
3960 {
3961 buffers.push_back(std::move(bufInfo));
3962 }
3963 else
3964 {
3965 break;
3966 }
3967 }
3968
3969 char* statsString;
3970 vmaBuildStatsString(g_hAllocator, &statsString, VK_TRUE);
3971 printf("%s\n", statsString);
3972 vmaFreeStatsString(g_hAllocator, statsString);
3973
3974 VmaBudget budget1[VK_MAX_MEMORY_HEAPS];
3975 vmaGetBudget(g_hAllocator, budget1);
3976
3977 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3978
3979 VmaBudget budget2[VK_MAX_MEMORY_HEAPS];
3980 vmaGetBudget(g_hAllocator, budget2);
3981
3982 for(size_t i = buffers.size(); i--; )
3983 {
3984 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
3985 }
3986 }
Adam Sawicki40ffe982019-10-11 15:56:02 +02003987}
3988
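// Exercises vmaMapMemory / vmaUnmapMemory reference counting and the
// VMA_ALLOCATION_CREATE_MAPPED_BIT flag for allocations in default memory,
// in a custom pool, and as dedicated allocations.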
Adam Sawickib8333fb2018-03-13 16:15:53 +01003989static void TestMapping()
3990{
3991 wprintf(L"Testing mapping...\n");
3992
3993 VkResult res;
3994 uint32_t memTypeIndex = UINT32_MAX;
3995
3996 enum TEST
3997 {
3998 TEST_NORMAL,
3999 TEST_POOL,
4000 TEST_DEDICATED,
4001 TEST_COUNT
4002 };
4003 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4004 {
4005 VmaPool pool = nullptr;
4006 if(testIndex == TEST_POOL)
4007 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004008 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004009 VmaPoolCreateInfo poolInfo = {};
4010 poolInfo.memoryTypeIndex = memTypeIndex;
4011 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004012 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004013 }
4014
4015 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4016 bufInfo.size = 0x10000;
4017 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004018
Adam Sawickib8333fb2018-03-13 16:15:53 +01004019 VmaAllocationCreateInfo allocCreateInfo = {};
4020 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4021 allocCreateInfo.pool = pool;
4022 if(testIndex == TEST_DEDICATED)
4023 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004024
Adam Sawickib8333fb2018-03-13 16:15:53 +01004025 VmaAllocationInfo allocInfo;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004026
Adam Sawickib8333fb2018-03-13 16:15:53 +01004027 // Mapped manually
4028
4029 // Create 2 buffers.
4030 BufferInfo bufferInfos[3];
4031 for(size_t i = 0; i < 2; ++i)
4032 {
4033 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4034 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004035 TEST(res == VK_SUCCESS);
4036 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004037 memTypeIndex = allocInfo.memoryType;
4038 }
Adam Sawicki40ffe982019-10-11 15:56:02 +02004039
Adam Sawickib8333fb2018-03-13 16:15:53 +01004040 // Map buffer 0.
4041 char* data00 = nullptr;
4042 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004043 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004044 data00[0xFFFF] = data00[0];
4045
4046 // Map buffer 0 second time.
4047 char* data01 = nullptr;
4048 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004049 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004050
4051 // Map buffer 1.
4052 char* data1 = nullptr;
4053 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004054 TEST(res == VK_SUCCESS && data1 != nullptr);
4055 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004056 data1[0xFFFF] = data1[0];
4057
4058 // Unmap buffer 0 two times.
4059 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4060 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4061 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004062 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004063
4064 // Unmap buffer 1.
4065 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4066 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004067 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004068
4069 // Create 3rd buffer - persistently mapped.
4070 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4071 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4072 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004073 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004074
4075 // Map buffer 2.
4076 char* data2 = nullptr;
4077 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004078 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004079 data2[0xFFFF] = data2[0];
4080
4081 // Unmap buffer 2.
4082 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4083 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004084 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004085
4086 // Destroy all buffers.
4087 for(size_t i = 3; i--; )
4088 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4089
4090 vmaDestroyPool(g_hAllocator, pool);
4091 }
4092}
4093
Adam Sawickidaa6a552019-06-25 15:26:37 +02004094// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
4095static void TestDeviceLocalMapped()
4096{
4097 VkResult res;
4098
4099 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
4100 {
4101 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4102 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4103 bufCreateInfo.size = 4096;
4104
4105 VmaPool pool = VK_NULL_HANDLE;
4106 VmaAllocationCreateInfo allocCreateInfo = {};
4107 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4108 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4109 if(testIndex == 2)
4110 {
4111 VmaPoolCreateInfo poolCreateInfo = {};
4112 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4113 TEST(res == VK_SUCCESS);
4114 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4115 TEST(res == VK_SUCCESS);
4116 allocCreateInfo.pool = pool;
4117 }
4118 else if(testIndex == 1)
4119 {
4120 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4121 }
4122
4123 VkBuffer buf = VK_NULL_HANDLE;
4124 VmaAllocation alloc = VK_NULL_HANDLE;
4125 VmaAllocationInfo allocInfo = {};
4126 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4127 TEST(res == VK_SUCCESS && alloc);
4128
4129 VkMemoryPropertyFlags memTypeFlags = 0;
4130 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4131 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4132 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
4133
4134 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4135 vmaDestroyPool(g_hAllocator, pool);
4136 }
4137}
4138
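// Same mapping scenarios as TestMapping, but performed concurrently from multiple
// threads; each buffer randomly picks one of the mapping modes defined below.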
Adam Sawickib8333fb2018-03-13 16:15:53 +01004139static void TestMappingMultithreaded()
4140{
4141 wprintf(L"Testing mapping multithreaded...\n");
4142
4143 static const uint32_t threadCount = 16;
4144 static const uint32_t bufferCount = 1024;
4145 static const uint32_t threadBufferCount = bufferCount / threadCount;
4146
4147 VkResult res;
4148 volatile uint32_t memTypeIndex = UINT32_MAX;
4149
4150 enum TEST
4151 {
4152 TEST_NORMAL,
4153 TEST_POOL,
4154 TEST_DEDICATED,
4155 TEST_COUNT
4156 };
4157 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4158 {
4159 VmaPool pool = nullptr;
4160 if(testIndex == TEST_POOL)
4161 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004162 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004163 VmaPoolCreateInfo poolInfo = {};
4164 poolInfo.memoryTypeIndex = memTypeIndex;
4165 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004166 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004167 }
4168
4169 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4170 bufCreateInfo.size = 0x10000;
4171 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4172
4173 VmaAllocationCreateInfo allocCreateInfo = {};
4174 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4175 allocCreateInfo.pool = pool;
4176 if(testIndex == TEST_DEDICATED)
4177 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4178
4179 std::thread threads[threadCount];
4180 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4181 {
4182 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4183 // ======== THREAD FUNCTION ========
4184
4185 RandomNumberGenerator rand{threadIndex};
4186
4187 enum class MODE
4188 {
4189 // Don't map this buffer at all.
4190 DONT_MAP,
4191 // Map and quickly unmap.
4192 MAP_FOR_MOMENT,
4193 // Map and unmap before destruction.
4194 MAP_FOR_LONGER,
4195 // Map two times. Quickly unmap, second unmap before destruction.
4196 MAP_TWO_TIMES,
4197 // Create this buffer as persistently mapped.
4198 PERSISTENTLY_MAPPED,
4199 COUNT
4200 };
4201 std::vector<BufferInfo> bufInfos{threadBufferCount};
4202 std::vector<MODE> bufModes{threadBufferCount};
4203
                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test that reading from and writing to the beginning and the end of the mapped memory don't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test again that reading from and writing to the beginning and the end of the mapped memory don't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}

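// CSV output helpers for the main benchmark. WriteMainTestResultHeader() writes the column
// names once per output file; WriteMainTestResult() appends one row per finished test run,
// with all times converted to microseconds.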
static void WriteMainTestResultHeader(FILE* file)
{
    fprintf(file,
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n");
}

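// Appends a single result row. Note: testDescription is expected to already contain commas
// so that it spans the "Threads" ... "Free order" columns declared in the header above
// (see how desc1..desc6 are composed in PerformMainTests()).
// Illustrative row layout (values made up):
//   Foo,<current time>,16_threads+50%_common,Buffers,Small Varying_sizes,Allocate_100%,BestFit,FORWARD,12345.67,...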
static void WriteMainTestResult(
    FILE* file,
    const char* codeDescription,
    const char* testDescription,
    const Config& config, const Result& result)
{
    float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
    float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
    float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
    float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
    float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
    float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
    float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);

    std::string currTime;
    CurrentTimeToStr(currTime);

    fprintf(file,
        "%s,%s,%s,"
        "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
        codeDescription,
        currTime.c_str(),
        testDescription,
        totalTimeSeconds * 1e6f,
        allocationTimeMinSeconds * 1e6f,
        allocationTimeAvgSeconds * 1e6f,
        allocationTimeMaxSeconds * 1e6f,
        deallocationTimeMinSeconds * 1e6f,
        deallocationTimeAvgSeconds * 1e6f,
        deallocationTimeMaxSeconds * 1e6f,
        result.TotalMemoryAllocated,
        result.FreeRangeSizeAvg,
        result.FreeRangeSizeMax);
}

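// Analogous CSV helpers for the custom-pool benchmark: a header row followed by one row per
// run, carrying the pool configuration plus the lost/failed allocation statistics.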
static void WritePoolTestResultHeader(FILE* file)
{
    fprintf(file,
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n");
}

static void WritePoolTestResult(
    FILE* file,
    const char* codeDescription,
    const char* testDescription,
    const PoolTestConfig& config,
    const PoolTestResult& result)
{
    float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
    float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
    float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
    float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
    float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
    float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
    float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);

    std::string currTime;
    CurrentTimeToStr(currTime);

    fprintf(file,
        "%s,%s,%s,"
        "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
        "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
        // General
        codeDescription,
        testDescription,
        currTime.c_str(),
        // Config
        config.ThreadCount,
        (unsigned long long)config.PoolSize,
        config.FrameCount,
        config.TotalItemCount,
        config.UsedItemCountMin,
        config.UsedItemCountMax,
        config.ItemsToMakeUnusedPercent,
        // Results
        totalTimeSeconds * 1e6f,
        allocationTimeMinSeconds * 1e6f,
        allocationTimeAvgSeconds * 1e6f,
        allocationTimeMaxSeconds * 1e6f,
        deallocationTimeMinSeconds * 1e6f,
        deallocationTimeAvgSeconds * 1e6f,
        deallocationTimeMaxSeconds * 1e6f,
        result.LostAllocationCount,
        result.LostAllocationTotalSize,
        result.FailedAllocationCount,
        result.FailedAllocationTotalSize);
}

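// Single hand-tuned run of the main benchmark, useful for ad-hoc experiments and profiling.
// Edit the config below as needed; its call in Test() is normally commented out.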
static void PerformCustomMainTest(FILE* file)
{
    Config config{};
    config.RandSeed = 65735476;
    //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;
    config.ThreadCount = 16;
    config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
    config.AllocationStrategy = 0;

    // Buffers
    //config.AllocationSizes.push_back({4, 16, 1024});
    config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB

    // Images
    //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
    //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});

    config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
    config.AdditionalOperationCount = 1024;

    Result result{};
    VkResult res = MainTest(result, config);
    TEST(res == VK_SUCCESS);
    WriteMainTestResult(file, "Foo", "CustomTest", config, result);
}

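// Single hand-tuned run of the pool benchmark, the counterpart of PerformCustomMainTest().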
static void PerformCustomPoolTest(FILE* file)
{
    PoolTestConfig config;
    config.PoolSize = 100 * 1024 * 1024;
    config.RandSeed = 2345764;
    config.ThreadCount = 1;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    AllocationSize allocSize = {};
    allocSize.BufferSizeMin = 1024;
    allocSize.BufferSizeMax = 1024 * 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    allocSize.BufferSizeMin = 0;
    allocSize.BufferSizeMax = 0;
    allocSize.ImageSizeMin = 128;
    allocSize.ImageSizeMax = 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    config.PoolSize = config.CalcAvgResourceSize() * 200;
    config.UsedItemCountMax = 160;
    config.TotalItemCount = config.UsedItemCountMax * 10;
    config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

    g_MemoryAliasingWarningEnabled = false;
    PoolTestResult result = {};
    TestPool_Benchmark(result, config);
    g_MemoryAliasingWarningEnabled = true;

    WritePoolTestResult(file, "Code desc", "Test desc", config, result);
}

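// Runs the main benchmark over a matrix of configurations: thread count, buffers vs. images,
// small vs. large sizes, varying vs. constant sizes, how much is allocated up front, and
// allocation strategy. The number of combinations (and repeats) grows with ConfigType.
// Results are appended to `file` as CSV rows; when `file` is null, writing is skipped.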
static void PerformMainTests(FILE* file)
{
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // 0 = allocate 100% up front with no additional operations;
                    // 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations.
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

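// Runs the custom-pool benchmark over a similar matrix of configurations. The pool size is
// derived from the average resource size, and the "Subscription" axis controls roughly how
// many items are kept alive relative to what the pool can hold (over- or under-subscription).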
static void PerformPoolTests(FILE* file)
{
    const size_t AVG_RESOURCES_PER_POOL = 300;

    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}

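// Smoke test of a custom pool using the buddy algorithm: allocates a few buffers of various
// sizes, one very small allocation, and one allocation with an explicit alignment requirement,
// dumps allocator stats to BuddyTest01.json, then frees everything in random order.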
static void BasicTestBuddyAllocator()
{
    wprintf(L"Basic test buddy allocator\n");

    RandomNumberGenerator rand{76543};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Deliberately adding 1023 to test a usable size smaller than the memory block size.
    poolCreateInfo.blockSize = 1024 * 1024 + 1023;
    poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
    //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    BufferInfo newBufInfo;
    VmaAllocationInfo allocInfo;

    bufCreateInfo.size = 1024 * 256;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 512;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 128;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test a very small allocation, smaller than the minimum node size.
    bufCreateInfo.size = 1;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test a small allocation with an alignment requirement.
    {
        VkMemoryRequirements memReq;
        memReq.alignment = 256;
        memReq.memoryTypeBits = UINT32_MAX;
        memReq.size = 32;

        newBufInfo.Buffer = VK_NULL_HANDLE;
        res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
            &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        TEST(allocInfo.offset % memReq.alignment == 0);
        bufInfo.push_back(newBufInfo);
    }

    //SaveAllocatorStatsToFile(L"TEST.json");

    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    int DBG = 0; // Set breakpoint here to inspect `stats`.

    // Allocate enough new buffers to be sure to spill into a second memory block.
    for(uint32_t i = 0; i < 32; ++i)
    {
        bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);
    }

    SaveAllocatorStatsToFile(L"BuddyTest01.json");

    // Destroy the buffers in random order.
    while(!bufInfo.empty())
    {
        const size_t indexToDestroy = rand.Generate() % bufInfo.size();
        const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
        vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
        bufInfo.erase(bufInfo.begin() + indexToDestroy);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

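// Smoke test for vmaAllocateMemoryPages()/vmaFreeMemoryPages(): a batch of allocations that fit
// in one block, batches expected to fail (too big, too strict alignment), and a batch of
// dedicated allocations, each verified through the returned VmaAllocationInfo.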
static void BasicTestAllocatePages()
{
    wprintf(L"Basic test allocate pages\n");

    RandomNumberGenerator rand{765461};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // 1 block of 1 MB.
    poolCreateInfo.blockSize = 1024 * 1024;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    // Create pool.
    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Make 100 allocations of 4 KB - they should fit into the pool.
    VkMemoryRequirements memReq;
    memReq.memoryTypeBits = UINT32_MAX;
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    allocCreateInfo.pool = pool;

    constexpr uint32_t allocCount = 100;

    std::vector<VmaAllocation> alloc{allocCount};
    std::vector<VmaAllocationInfo> allocInfo{allocCount};
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
            allocInfo[i].memoryType == allocInfo[0].memoryType);
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
    // Also test optional allocationInfo = null.
    memReq.size = 100 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
    TEST(res != VK_SUCCESS);
    TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());

    // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
    memReq.size = 4 * 1024;
    memReq.alignment = 128 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res != VK_SUCCESS);

    // Make 100 dedicated allocations of 4 KB.
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
    dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].memoryType == allocInfo[0].memoryType &&
            allocInfo[i].offset == 0);
        if(i > 0)
        {
            TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
        }
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    vmaDestroyPool(g_hAllocator, pool);
}

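// Creates 100 GPU_ONLY buffers, uploads data derived from a per-buffer start value with
// UploadGpuData(), and reads it back with ValidateGpuData() - a check of the test harness
// itself rather than of the allocator.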
// Test the testing environment.
static void TestGpuData()
{
    RandomNumberGenerator rand = { 53434 };

    std::vector<AllocInfo> allocInfo;

    for(size_t i = 0; i < 100; ++i)
    {
        AllocInfo info = {};

        info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
        info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

        VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
        TEST(res == VK_SUCCESS);

        info.m_StartValue = rand.Generate();

        allocInfo.push_back(std::move(info));
    }

    UploadGpuData(allocInfo.data(), allocInfo.size());

    ValidateGpuData(allocInfo.data(), allocInfo.size());

    DestroyAllAllocations(allocInfo);
}

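// Entry point of the test suite. Note the if(true) block right below: while it is enabled,
// only the custom test listed there runs and the function returns early, skipping the rest.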
void Test()
{
    wprintf(L"TESTING:\n");

    if(true)
    {
        ////////////////////////////////////////////////////////////////////////////////
        // Temporarily insert custom tests here:
        TestBudget();
        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestBudget();
    TestMapping();
    TestDeviceLocalMapped();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();
    BasicTestAllocatePages();

    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationWholePool();
    TestDefragmentationGpu();

    // # Detailed tests
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}

#endif // #ifdef _WIN32