//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
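
// Illustrative sketch only (not used by any test, values are arbitrary): a Config for a
// small single-threaded MainTest run could be filled roughly like this:
//
//   Config config = {};
//   config.RandSeed = 123;
//   config.BeginBytesToAllocate = 16ull * 1024 * 1024;
//   config.MaxBytesToAllocate = 64ull * 1024 * 1024;
//   config.AdditionalOperationCount = 1000;
//   config.MemUsageProbability[0] = 1; // index 0 maps to VMA_MEMORY_USAGE_GPU_ONLY
//   config.AllocationSizes.push_back({1, 1024, 1024 * 1024, 0, 0}); // buffers 1 KB .. 1 MB
//   config.ThreadCount = 1;
//   config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
//   config.FreeOrder = FREE_ORDER::FORWARD;
//   config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;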

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

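    // Probability-weighted average size of one resource, in bytes. Image entries are
    // estimated as width * height * 4 (RGBA8), using the average of the min/max dimensions.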
    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

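// Crude per-pixel byte estimate used only for the allocation byte budgets inside MainTest;
// it does not reflect the actual memory footprint of the created images.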
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
{
    const VkPhysicalDeviceMemoryProperties* props;
    vmaGetMemoryProperties(g_hAllocator, &props);
    return props->memoryTypes[memoryTypeIndex].heapIndex;
}

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

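// RAII timing helper: measures the time from construction to destruction and folds it
// into the referenced min/sum/max accumulators.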
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

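// Main stress test. Spawns config.ThreadCount threads; each thread first allocates buffers
// and images up to its share of BeginBytesToAllocate, then performs random additional
// allocations and frees (AdditionalOperationCount total), optionally touching allocations
// shared between threads, waits on a Win32 event until every thread is done, and finally
// frees everything in config.FreeOrder. Timing and memory statistics go into outResult.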
Adam Sawickib8333fb2018-03-13 16:15:53 +0100310VkResult MainTest(Result& outResult, const Config& config)
311{
312 assert(config.ThreadCount > 0);
313
314 InitResult(outResult);
315
316 RandomNumberGenerator mainRand{config.RandSeed};
317
318 time_point timeBeg = std::chrono::high_resolution_clock::now();
319
320 std::atomic<size_t> allocationCount = 0;
321 VkResult res = VK_SUCCESS;
322
323 uint32_t memUsageProbabilitySum =
324 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
325 config.MemUsageProbability[2] + config.MemUsageProbability[3];
326 assert(memUsageProbabilitySum > 0);
327
328 uint32_t allocationSizeProbabilitySum = std::accumulate(
329 config.AllocationSizes.begin(),
330 config.AllocationSizes.end(),
331 0u,
332 [](uint32_t sum, const AllocationSize& allocSize) {
333 return sum + allocSize.Probability;
334 });
335
336 struct Allocation
337 {
338 VkBuffer Buffer;
339 VkImage Image;
340 VmaAllocation Alloc;
341 };
342
343 std::vector<Allocation> commonAllocations;
344 std::mutex commonAllocationsMutex;
345
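    // Creates one buffer or image (exactly one of bufferSize / imageExtent must be non-zero)
    // with a VmaMemoryUsage picked according to config.MemUsageProbability, and stores it
    // either in the caller's thread-local list or in commonAllocations shared between threads.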
346 auto Allocate = [&](
347 VkDeviceSize bufferSize,
348 const VkExtent2D imageExtent,
349 RandomNumberGenerator& localRand,
350 VkDeviceSize& totalAllocatedBytes,
351 std::vector<Allocation>& allocations) -> VkResult
352 {
353 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
354
355 uint32_t memUsageIndex = 0;
356 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
357 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
358 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
359
360 VmaAllocationCreateInfo memReq = {};
361 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200362 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100363
364 Allocation allocation = {};
365 VmaAllocationInfo allocationInfo;
366
367 // Buffer
368 if(bufferSize > 0)
369 {
370 assert(imageExtent.width == 0);
371 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
372 bufferInfo.size = bufferSize;
373 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
374
375 {
376 AllocationTimeRegisterObj timeRegisterObj{outResult};
377 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
378 }
379 }
380 // Image
381 else
382 {
383 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
384 imageInfo.imageType = VK_IMAGE_TYPE_2D;
385 imageInfo.extent.width = imageExtent.width;
386 imageInfo.extent.height = imageExtent.height;
387 imageInfo.extent.depth = 1;
388 imageInfo.mipLevels = 1;
389 imageInfo.arrayLayers = 1;
390 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
391 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
392 VK_IMAGE_TILING_OPTIMAL :
393 VK_IMAGE_TILING_LINEAR;
394 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
395 switch(memReq.usage)
396 {
397 case VMA_MEMORY_USAGE_GPU_ONLY:
398 switch(localRand.Generate() % 3)
399 {
400 case 0:
401 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
402 break;
403 case 1:
404 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
405 break;
406 case 2:
407 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
408 break;
409 }
410 break;
411 case VMA_MEMORY_USAGE_CPU_ONLY:
412 case VMA_MEMORY_USAGE_CPU_TO_GPU:
413 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
414 break;
415 case VMA_MEMORY_USAGE_GPU_TO_CPU:
416 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
417 break;
418 }
419 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
420 imageInfo.flags = 0;
421
422 {
423 AllocationTimeRegisterObj timeRegisterObj{outResult};
424 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
425 }
426 }
427
428 if(res == VK_SUCCESS)
429 {
430 ++allocationCount;
431 totalAllocatedBytes += allocationInfo.size;
432 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
433 if(useCommonAllocations)
434 {
435 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
436 commonAllocations.push_back(allocation);
437 }
438 else
439 allocations.push_back(allocation);
440 }
441 else
442 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200443 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100444 }
445 return res;
446 };
447
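    // Picks a random entry from config.AllocationSizes (weighted by Probability) and returns
    // either a buffer size rounded down to a multiple of 16, or an image extent with width and
    // height drawn independently; the unused output stays zero.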
448 auto GetNextAllocationSize = [&](
449 VkDeviceSize& outBufSize,
450 VkExtent2D& outImageSize,
451 RandomNumberGenerator& localRand)
452 {
453 outBufSize = 0;
454 outImageSize = {0, 0};
455
456 uint32_t allocSizeIndex = 0;
457 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
458 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
459 r -= config.AllocationSizes[allocSizeIndex++].Probability;
460
461 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
462 if(allocSize.BufferSizeMax > 0)
463 {
464 assert(allocSize.ImageSizeMax == 0);
465 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
466 outBufSize = allocSize.BufferSizeMin;
467 else
468 {
469 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
470 outBufSize = outBufSize / 16 * 16;
471 }
472 }
473 else
474 {
475 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
476 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
477 else
478 {
479 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
480 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
481 }
482 }
483 };
484
485 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
486 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
487
488 auto ThreadProc = [&](uint32_t randSeed) -> void
489 {
490 RandomNumberGenerator threadRand(randSeed);
491 VkDeviceSize threadTotalAllocatedBytes = 0;
492 std::vector<Allocation> threadAllocations;
493 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
494 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
495 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
496
497 // BEGIN ALLOCATIONS
498 for(;;)
499 {
500 VkDeviceSize bufferSize = 0;
501 VkExtent2D imageExtent = {};
502 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
503 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
504 threadBeginBytesToAllocate)
505 {
506 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
507 break;
508 }
509 else
510 break;
511 }
512
513 // ADDITIONAL ALLOCATIONS AND FREES
514 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
515 {
516 VkDeviceSize bufferSize = 0;
517 VkExtent2D imageExtent = {};
518 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
519
520 // true = allocate, false = free
521 bool allocate = threadRand.Generate() % 2 != 0;
522
523 if(allocate)
524 {
525 if(threadTotalAllocatedBytes +
526 bufferSize +
527 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
528 threadMaxBytesToAllocate)
529 {
530 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
531 break;
532 }
533 }
534 else
535 {
536 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
537 if(useCommonAllocations)
538 {
539 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
540 if(!commonAllocations.empty())
541 {
542 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
543 VmaAllocationInfo allocationInfo;
544 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
545 if(threadTotalAllocatedBytes >= allocationInfo.size)
546 {
547 DeallocationTimeRegisterObj timeRegisterObj{outResult};
548 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
549 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
550 else
551 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
552 threadTotalAllocatedBytes -= allocationInfo.size;
553 commonAllocations.erase(commonAllocations.begin() + indexToFree);
554 }
555 }
556 }
557 else
558 {
559 if(!threadAllocations.empty())
560 {
561 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
562 VmaAllocationInfo allocationInfo;
563 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
564 if(threadTotalAllocatedBytes >= allocationInfo.size)
565 {
566 DeallocationTimeRegisterObj timeRegisterObj{outResult};
567 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
568 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
569 else
570 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
571 threadTotalAllocatedBytes -= allocationInfo.size;
572 threadAllocations.erase(threadAllocations.begin() + indexToFree);
573 }
574 }
575 }
576 }
577 }
578
579 ++numThreadsReachedMaxAllocations;
580
581 WaitForSingleObject(threadsFinishEvent, INFINITE);
582
583 // DEALLOCATION
584 while(!threadAllocations.empty())
585 {
586 size_t indexToFree = 0;
587 switch(config.FreeOrder)
588 {
589 case FREE_ORDER::FORWARD:
590 indexToFree = 0;
591 break;
592 case FREE_ORDER::BACKWARD:
593 indexToFree = threadAllocations.size() - 1;
594 break;
595 case FREE_ORDER::RANDOM:
596 indexToFree = mainRand.Generate() % threadAllocations.size();
597 break;
598 }
599
600 {
601 DeallocationTimeRegisterObj timeRegisterObj{outResult};
602 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
603 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
604 else
605 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
606 }
607 threadAllocations.erase(threadAllocations.begin() + indexToFree);
608 }
609 };
610
611 uint32_t threadRandSeed = mainRand.Generate();
612 std::vector<std::thread> bkgThreads;
613 for(size_t i = 0; i < config.ThreadCount; ++i)
614 {
615 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
616 }
617
    // Wait until all threads have finished allocating.
619 while(numThreadsReachedMaxAllocations < config.ThreadCount)
620 Sleep(0);
621
622 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
623 VmaStats vmaStats = {};
624 vmaCalculateStats(g_hAllocator, &vmaStats);
625 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
626 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
627 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
628
629 // Signal threads to deallocate
630 SetEvent(threadsFinishEvent);
631
    // Wait for all threads to finish.
633 for(size_t i = 0; i < bkgThreads.size(); ++i)
634 bkgThreads[i].join();
635 bkgThreads.clear();
636
637 CloseHandle(threadsFinishEvent);
638
639 // Deallocate remaining common resources
640 while(!commonAllocations.empty())
641 {
642 size_t indexToFree = 0;
643 switch(config.FreeOrder)
644 {
645 case FREE_ORDER::FORWARD:
646 indexToFree = 0;
647 break;
648 case FREE_ORDER::BACKWARD:
649 indexToFree = commonAllocations.size() - 1;
650 break;
651 case FREE_ORDER::RANDOM:
652 indexToFree = mainRand.Generate() % commonAllocations.size();
653 break;
654 }
655
656 {
657 DeallocationTimeRegisterObj timeRegisterObj{outResult};
658 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
659 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
660 else
661 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
662 }
663 commonAllocations.erase(commonAllocations.begin() + indexToFree);
664 }
665
666 if(allocationCount)
667 {
668 outResult.AllocationTimeAvg /= allocationCount;
669 outResult.DeallocationTimeAvg /= allocationCount;
670 }
671
672 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
673
674 return res;
675}
676
void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
        m_Image = VK_NULL_HANDLE;
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
        m_Buffer = VK_NULL_HANDLE;
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
        m_Allocation = VK_NULL_HANDLE;
    }
}

731
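// Pool of persistently mapped, host-visible staging buffers used by UploadGpuData and
// ValidateGpuData. Buffers are reused between batches and their combined size is capped at
// MAX_TOTAL_SIZE; AcquireBuffer returns false only when the requested size cannot fit under
// the cap even after all currently unused buffers have been freed.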
Adam Sawickif2975342018-10-16 13:49:02 +0200732class StagingBufferCollection
733{
734public:
735 StagingBufferCollection() { }
736 ~StagingBufferCollection();
737 // Returns false if maximum total size of buffers would be exceeded.
738 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
739 void ReleaseAllBuffers();
740
741private:
742 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
743 struct BufInfo
744 {
745 VmaAllocation Allocation = VK_NULL_HANDLE;
746 VkBuffer Buffer = VK_NULL_HANDLE;
747 VkDeviceSize Size = VK_WHOLE_SIZE;
748 void* MappedPtr = nullptr;
749 bool Used = false;
750 };
751 std::vector<BufInfo> m_Bufs;
752 // Including both used and unused.
753 VkDeviceSize m_TotalSize = 0;
754};
755
756StagingBufferCollection::~StagingBufferCollection()
757{
758 for(size_t i = m_Bufs.size(); i--; )
759 {
760 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
761 }
762}
763
764bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
765{
766 assert(size <= MAX_TOTAL_SIZE);
767
768 // Try to find existing unused buffer with best size.
769 size_t bestIndex = SIZE_MAX;
770 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
771 {
772 BufInfo& currBufInfo = m_Bufs[i];
773 if(!currBufInfo.Used && currBufInfo.Size >= size &&
774 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
775 {
776 bestIndex = i;
777 }
778 }
779
780 if(bestIndex != SIZE_MAX)
781 {
782 m_Bufs[bestIndex].Used = true;
783 outBuffer = m_Bufs[bestIndex].Buffer;
784 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
785 return true;
786 }
787
788 // Allocate new buffer with requested size.
789 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
790 {
791 BufInfo bufInfo;
792 bufInfo.Size = size;
793 bufInfo.Used = true;
794
795 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
796 bufCreateInfo.size = size;
797 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
798
799 VmaAllocationCreateInfo allocCreateInfo = {};
800 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
801 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
802
803 VmaAllocationInfo allocInfo;
804 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
805 bufInfo.MappedPtr = allocInfo.pMappedData;
806 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
807
808 outBuffer = bufInfo.Buffer;
809 outMappedPtr = bufInfo.MappedPtr;
810
811 m_Bufs.push_back(std::move(bufInfo));
812
813 m_TotalSize += size;
814
815 return true;
816 }
817
    // The total size limit would be exceeded. If there are any unused (but too small) buffers, free them all and try again.
819 bool hasUnused = false;
820 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
821 {
822 if(!m_Bufs[i].Used)
823 {
824 hasUnused = true;
825 break;
826 }
827 }
828 if(hasUnused)
829 {
830 for(size_t i = m_Bufs.size(); i--; )
831 {
832 if(!m_Bufs[i].Used)
833 {
834 m_TotalSize -= m_Bufs[i].Size;
835 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
836 m_Bufs.erase(m_Bufs.begin() + i);
837 }
838 }
839
840 return AcquireBuffer(size, outBuffer, outMappedPtr);
841 }
842
843 return false;
844}
845
846void StagingBufferCollection::ReleaseAllBuffers()
847{
848 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
849 {
850 m_Bufs[i].Used = false;
851 }
852}
853
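// Fills every buffer in allocInfo[] with its sequential test pattern (starting at
// m_StartValue) by writing into mapped staging buffers and recording vkCmdCopyBuffer into
// the single-time command buffer; the batch is submitted whenever the staging collection
// runs out of space. Images are not supported here yet.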
854static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
855{
856 StagingBufferCollection stagingBufs;
857
858 bool cmdBufferStarted = false;
859 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
860 {
861 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
862 if(currAllocInfo.m_Buffer)
863 {
864 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
865
866 VkBuffer stagingBuf = VK_NULL_HANDLE;
867 void* stagingBufMappedPtr = nullptr;
868 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
869 {
870 TEST(cmdBufferStarted);
871 EndSingleTimeCommands();
872 stagingBufs.ReleaseAllBuffers();
873 cmdBufferStarted = false;
874
875 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
876 TEST(ok);
877 }
878
879 // Fill staging buffer.
880 {
881 assert(size % sizeof(uint32_t) == 0);
882 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
883 uint32_t val = currAllocInfo.m_StartValue;
884 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
885 {
886 *stagingValPtr = val;
887 ++stagingValPtr;
888 ++val;
889 }
890 }
891
892 // Issue copy command from staging buffer to destination buffer.
893 if(!cmdBufferStarted)
894 {
895 cmdBufferStarted = true;
896 BeginSingleTimeCommands();
897 }
898
899 VkBufferCopy copy = {};
900 copy.srcOffset = 0;
901 copy.dstOffset = 0;
902 copy.size = size;
903 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
904 }
905 else
906 {
907 TEST(0 && "Images not currently supported.");
908 }
909 }
910
911 if(cmdBufferStarted)
912 {
913 EndSingleTimeCommands();
914 stagingBufs.ReleaseAllBuffers();
915 }
916}
917
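// Reverse of UploadGpuData: copies every buffer back into staging memory with
// vkCmdCopyBuffer and, after each submitted batch completes, checks that the sequential
// pattern starting at m_StartValue is still intact.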
918static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
919{
920 StagingBufferCollection stagingBufs;
921
922 bool cmdBufferStarted = false;
923 size_t validateAllocIndexOffset = 0;
924 std::vector<void*> validateStagingBuffers;
925 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
926 {
927 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
928 if(currAllocInfo.m_Buffer)
929 {
930 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
931
932 VkBuffer stagingBuf = VK_NULL_HANDLE;
933 void* stagingBufMappedPtr = nullptr;
934 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
935 {
936 TEST(cmdBufferStarted);
937 EndSingleTimeCommands();
938 cmdBufferStarted = false;
939
940 for(size_t validateIndex = 0;
941 validateIndex < validateStagingBuffers.size();
942 ++validateIndex)
943 {
944 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
945 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
946 TEST(validateSize % sizeof(uint32_t) == 0);
947 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
948 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
949 bool valid = true;
950 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
951 {
952 if(*stagingValPtr != val)
953 {
954 valid = false;
955 break;
956 }
957 ++stagingValPtr;
958 ++val;
959 }
960 TEST(valid);
961 }
962
963 stagingBufs.ReleaseAllBuffers();
964
965 validateAllocIndexOffset = allocInfoIndex;
966 validateStagingBuffers.clear();
967
968 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
969 TEST(ok);
970 }
971
972 // Issue copy command from staging buffer to destination buffer.
973 if(!cmdBufferStarted)
974 {
975 cmdBufferStarted = true;
976 BeginSingleTimeCommands();
977 }
978
979 VkBufferCopy copy = {};
980 copy.srcOffset = 0;
981 copy.dstOffset = 0;
982 copy.size = size;
983 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
984
            // Save mapped pointer for later validation.
986 validateStagingBuffers.push_back(stagingBufMappedPtr);
987 }
988 else
989 {
990 TEST(0 && "Images not currently supported.");
991 }
992 }
993
994 if(cmdBufferStarted)
995 {
996 EndSingleTimeCommands();
997
998 for(size_t validateIndex = 0;
999 validateIndex < validateStagingBuffers.size();
1000 ++validateIndex)
1001 {
1002 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1003 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1004 TEST(validateSize % sizeof(uint32_t) == 0);
1005 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1006 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1007 bool valid = true;
1008 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1009 {
1010 if(*stagingValPtr != val)
1011 {
1012 valid = false;
1013 break;
1014 }
1015 ++stagingValPtr;
1016 ++val;
1017 }
1018 TEST(valid);
1019 }
1020
1021 stagingBufs.ReleaseAllBuffers();
1022 }
1023}
1024
Adam Sawickib8333fb2018-03-13 16:15:53 +01001025static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1026{
1027 outMemReq = {};
1028 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1029 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1030}
1031
1032static void CreateBuffer(
1033 VmaPool pool,
1034 const VkBufferCreateInfo& bufCreateInfo,
1035 bool persistentlyMapped,
1036 AllocInfo& outAllocInfo)
1037{
1038 outAllocInfo = {};
1039 outAllocInfo.m_BufferInfo = bufCreateInfo;
1040
1041 VmaAllocationCreateInfo allocCreateInfo = {};
1042 allocCreateInfo.pool = pool;
1043 if(persistentlyMapped)
1044 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1045
1046 VmaAllocationInfo vmaAllocInfo = {};
1047 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1048
1049 // Setup StartValue and fill.
1050 {
1051 outAllocInfo.m_StartValue = (uint32_t)rand();
1052 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001053 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001054 if(!persistentlyMapped)
1055 {
1056 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1057 }
1058
1059 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001060 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001061 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1062 data[i] = value++;
1063
1064 if(!persistentlyMapped)
1065 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1066 }
1067}
1068
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001069static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001070{
1071 outAllocation.m_Allocation = nullptr;
1072 outAllocation.m_Buffer = nullptr;
1073 outAllocation.m_Image = nullptr;
1074 outAllocation.m_StartValue = (uint32_t)rand();
1075
1076 VmaAllocationCreateInfo vmaMemReq;
1077 GetMemReq(vmaMemReq);
1078
1079 VmaAllocationInfo allocInfo;
1080
1081 const bool isBuffer = true;//(rand() & 0x1) != 0;
1082 const bool isLarge = (rand() % 16) == 0;
1083 if(isBuffer)
1084 {
1085 const uint32_t bufferSize = isLarge ?
1086 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1087 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1088
1089 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1090 bufferInfo.size = bufferSize;
1091 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1092
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001093 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001094 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001095 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001096 }
1097 else
1098 {
1099 const uint32_t imageSizeX = isLarge ?
1100 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1101 rand() % 1024 + 1; // 1 ... 1024
1102 const uint32_t imageSizeY = isLarge ?
1103 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1104 rand() % 1024 + 1; // 1 ... 1024
1105
1106 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1107 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1108 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1109 imageInfo.extent.width = imageSizeX;
1110 imageInfo.extent.height = imageSizeY;
1111 imageInfo.extent.depth = 1;
1112 imageInfo.mipLevels = 1;
1113 imageInfo.arrayLayers = 1;
1114 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1115 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1116 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1117 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1118
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001119 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001120 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001121 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001122 }
1123
1124 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1125 if(allocInfo.pMappedData == nullptr)
1126 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001127 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001128 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001129 }
1130
1131 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001132 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001133 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1134 data[i] = value++;
1135
1136 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001137 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001138}
1139
1140static void DestroyAllocation(const AllocInfo& allocation)
1141{
1142 if(allocation.m_Buffer)
1143 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1144 else
1145 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1146}
1147
1148static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1149{
1150 for(size_t i = allocations.size(); i--; )
1151 DestroyAllocation(allocations[i]);
1152 allocations.clear();
1153}
1154
1155static void ValidateAllocationData(const AllocInfo& allocation)
1156{
1157 VmaAllocationInfo allocInfo;
1158 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1159
1160 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1161 if(allocInfo.pMappedData == nullptr)
1162 {
1163 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001164 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001165 }
1166
1167 uint32_t value = allocation.m_StartValue;
1168 bool ok = true;
1169 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001170 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001171 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1172 {
1173 if(data[i] != value++)
1174 {
1175 ok = false;
1176 break;
1177 }
1178 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001179 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001180
1181 if(allocInfo.pMappedData == nullptr)
1182 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1183}
1184
1185static void RecreateAllocationResource(AllocInfo& allocation)
1186{
1187 VmaAllocationInfo allocInfo;
1188 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1189
1190 if(allocation.m_Buffer)
1191 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001192 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001193
Adam Sawicki1f84f622019-07-02 13:40:01 +02001194 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001195 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001196
1197 // Just to silence validation layer warnings.
1198 VkMemoryRequirements vkMemReq;
1199 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001200 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001201
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001202 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001203 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001204 }
1205 else
1206 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001207 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001208
Adam Sawicki1f84f622019-07-02 13:40:01 +02001209 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001210 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001211
1212 // Just to silence validation layer warnings.
1213 VkMemoryRequirements vkMemReq;
1214 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1215
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001216 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001217 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001218 }
1219}
1220
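// Helper that runs vmaDefragment() on the given allocations and, for every allocation
// reported as changed, recreates and rebinds its buffer or image so it remains usable.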
1221static void Defragment(AllocInfo* allocs, size_t allocCount,
1222 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1223 VmaDefragmentationStats* defragmentationStats = nullptr)
1224{
1225 std::vector<VmaAllocation> vmaAllocs(allocCount);
1226 for(size_t i = 0; i < allocCount; ++i)
1227 vmaAllocs[i] = allocs[i].m_Allocation;
1228
1229 std::vector<VkBool32> allocChanged(allocCount);
1230
1231 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1232 defragmentationInfo, defragmentationStats) );
1233
1234 for(size_t i = 0; i < allocCount; ++i)
1235 {
1236 if(allocChanged[i])
1237 {
1238 RecreateAllocationResource(allocs[i]);
1239 }
1240 }
1241}
1242
1243static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1244{
1245 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1246 ValidateAllocationData(allocInfo);
1247 });
1248}
1249
1250void TestDefragmentationSimple()
1251{
1252 wprintf(L"Test defragmentation simple\n");
1253
1254 RandomNumberGenerator rand(667);
1255
1256 const VkDeviceSize BUF_SIZE = 0x10000;
1257 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1258
1259 const VkDeviceSize MIN_BUF_SIZE = 32;
1260 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1261 auto RandomBufSize = [&]() -> VkDeviceSize {
1262 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1263 };
1264
1265 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1266 bufCreateInfo.size = BUF_SIZE;
1267 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1268
1269 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1270 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1271
1272 uint32_t memTypeIndex = UINT32_MAX;
1273 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1274
1275 VmaPoolCreateInfo poolCreateInfo = {};
1276 poolCreateInfo.blockSize = BLOCK_SIZE;
1277 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1278
1279 VmaPool pool;
1280 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1281
Adam Sawickie1681912018-11-23 17:50:12 +01001282 // Defragmentation of empty pool.
1283 {
1284 VmaDefragmentationInfo2 defragInfo = {};
1285 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1286 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1287 defragInfo.poolCount = 1;
1288 defragInfo.pPools = &pool;
1289
1290 VmaDefragmentationStats defragStats = {};
1291 VmaDefragmentationContext defragCtx = nullptr;
1292 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1293 TEST(res >= VK_SUCCESS);
1294 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1295 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1296 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1297 }
1298
Adam Sawickib8333fb2018-03-13 16:15:53 +01001299 std::vector<AllocInfo> allocations;
1300
1301 // persistentlyMappedOption = 0 - not persistently mapped.
1302 // persistentlyMappedOption = 1 - persistently mapped.
1303 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1304 {
1305 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1306 const bool persistentlyMapped = persistentlyMappedOption != 0;
1307
1308 // # Test 1
1309 // Buffers of fixed size.
1310 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1311 // Expected result: at least 1 block freed.
1312 {
1313 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1314 {
1315 AllocInfo allocInfo;
1316 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1317 allocations.push_back(allocInfo);
1318 }
1319
1320 for(size_t i = 1; i < allocations.size(); ++i)
1321 {
1322 DestroyAllocation(allocations[i]);
1323 allocations.erase(allocations.begin() + i);
1324 }
1325
1326 VmaDefragmentationStats defragStats;
1327 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001328 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1329 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001330
1331 ValidateAllocationsData(allocations.data(), allocations.size());
1332
1333 DestroyAllAllocations(allocations);
1334 }
1335
1336 // # Test 2
1337 // Buffers of fixed size.
1338 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
        // Expected result: Each of 4 iterations makes some progress.
1340 {
1341 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1342 {
1343 AllocInfo allocInfo;
1344 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1345 allocations.push_back(allocInfo);
1346 }
1347
1348 for(size_t i = 1; i < allocations.size(); ++i)
1349 {
1350 DestroyAllocation(allocations[i]);
1351 allocations.erase(allocations.begin() + i);
1352 }
1353
1354 VmaDefragmentationInfo defragInfo = {};
1355 defragInfo.maxAllocationsToMove = 1;
1356 defragInfo.maxBytesToMove = BUF_SIZE;
1357
1358 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1359 {
1360 VmaDefragmentationStats defragStats;
1361 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001362 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001363 }
1364
1365 ValidateAllocationsData(allocations.data(), allocations.size());
1366
1367 DestroyAllAllocations(allocations);
1368 }
1369
1370 // # Test 3
1371 // Buffers of variable size.
1372 // Create a number of buffers. Remove some percent of them.
1373 // Defragment while having some percent of them unmovable.
1374 // Expected result: Just simple validation.
1375 {
1376 for(size_t i = 0; i < 100; ++i)
1377 {
1378 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1379 localBufCreateInfo.size = RandomBufSize();
1380
1381 AllocInfo allocInfo;
            CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
1383 allocations.push_back(allocInfo);
1384 }
1385
1386 const uint32_t percentToDelete = 60;
1387 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1388 for(size_t i = 0; i < numberToDelete; ++i)
1389 {
1390 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1391 DestroyAllocation(allocations[indexToDelete]);
1392 allocations.erase(allocations.begin() + indexToDelete);
1393 }
1394
1395 // Non-movable allocations will be at the beginning of allocations array.
1396 const uint32_t percentNonMovable = 20;
1397 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1398 for(size_t i = 0; i < numberNonMovable; ++i)
1399 {
1400 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1401 if(indexNonMovable != i)
1402 std::swap(allocations[i], allocations[indexNonMovable]);
1403 }
1404
1405 VmaDefragmentationStats defragStats;
1406 Defragment(
1407 allocations.data() + numberNonMovable,
1408 allocations.size() - numberNonMovable,
1409 nullptr, &defragStats);
1410
1411 ValidateAllocationsData(allocations.data(), allocations.size());
1412
1413 DestroyAllAllocations(allocations);
1414 }
1415 }
1416
Adam Sawicki647cf242018-11-23 17:58:00 +01001417 /*
    Allocation that must be moved to an overlapping place using memmove().
1419 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1420 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001421 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001422 {
1423 AllocInfo allocInfo[2];
1424
1425 bufCreateInfo.size = BUF_SIZE;
1426 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1427 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1428 bufCreateInfo.size = biggerBufSize;
1429 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1430
1431 DestroyAllocation(allocInfo[0]);
1432
1433 VmaDefragmentationStats defragStats;
1434 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1435 // If this fails, it means we couldn't do memmove with overlapping regions.
1436 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1437
1438 ValidateAllocationsData(&allocInfo[1], 1);
1439 DestroyAllocation(allocInfo[1]);
1440 }
1441
Adam Sawickib8333fb2018-03-13 16:15:53 +01001442 vmaDestroyPool(g_hAllocator, pool);
1443}
1444
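// Runs the same scenario twice: once defragmenting by pool (pPools) and once by an explicit
// list of all allocations, and checks that both runs report identical statistics.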
Adam Sawicki52076eb2018-11-22 16:14:50 +01001445void TestDefragmentationWholePool()
1446{
1447 wprintf(L"Test defragmentation whole pool\n");
1448
1449 RandomNumberGenerator rand(668);
1450
1451 const VkDeviceSize BUF_SIZE = 0x10000;
1452 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1453
1454 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1455 bufCreateInfo.size = BUF_SIZE;
1456 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1457
1458 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1459 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1460
1461 uint32_t memTypeIndex = UINT32_MAX;
1462 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1463
1464 VmaPoolCreateInfo poolCreateInfo = {};
1465 poolCreateInfo.blockSize = BLOCK_SIZE;
1466 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1467
1468 VmaDefragmentationStats defragStats[2];
1469 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1470 {
1471 VmaPool pool;
1472 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1473
1474 std::vector<AllocInfo> allocations;
1475
1476 // Buffers of fixed size.
1477 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1478 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1479 {
1480 AllocInfo allocInfo;
1481 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1482 allocations.push_back(allocInfo);
1483 }
1484
1485 for(size_t i = 1; i < allocations.size(); ++i)
1486 {
1487 DestroyAllocation(allocations[i]);
1488 allocations.erase(allocations.begin() + i);
1489 }
1490
1491 VmaDefragmentationInfo2 defragInfo = {};
1492 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1493 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1494 std::vector<VmaAllocation> allocationsToDefrag;
1495 if(caseIndex == 0)
1496 {
1497 defragInfo.poolCount = 1;
1498 defragInfo.pPools = &pool;
1499 }
1500 else
1501 {
1502 const size_t allocCount = allocations.size();
1503 allocationsToDefrag.resize(allocCount);
1504 std::transform(
1505 allocations.begin(), allocations.end(),
1506 allocationsToDefrag.begin(),
1507 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1508 defragInfo.allocationCount = (uint32_t)allocCount;
1509 defragInfo.pAllocations = allocationsToDefrag.data();
1510 }
1511
1512 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1513 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1514 TEST(res >= VK_SUCCESS);
1515 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1516
1517 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1518
1519 ValidateAllocationsData(allocations.data(), allocations.size());
1520
1521 DestroyAllAllocations(allocations);
1522
1523 vmaDestroyPool(g_hAllocator, pool);
1524 }
1525
1526 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1527 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1528 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1529 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1530}
1531
Adam Sawickib8333fb2018-03-13 16:15:53 +01001532void TestDefragmentationFull()
1533{
1534 std::vector<AllocInfo> allocations;
1535
1536 // Create initial allocations.
1537 for(size_t i = 0; i < 400; ++i)
1538 {
1539 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001540 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001541 allocations.push_back(allocation);
1542 }
1543
1544 // Delete random allocations
1545 const size_t allocationsToDeletePercent = 80;
1546 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1547 for(size_t i = 0; i < allocationsToDelete; ++i)
1548 {
1549 size_t index = (size_t)rand() % allocations.size();
1550 DestroyAllocation(allocations[index]);
1551 allocations.erase(allocations.begin() + index);
1552 }
1553
1554 for(size_t i = 0; i < allocations.size(); ++i)
1555 ValidateAllocationData(allocations[i]);
1556
Adam Sawicki0667e332018-08-24 17:26:44 +02001557 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001558
1559 {
1560 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1561 for(size_t i = 0; i < allocations.size(); ++i)
1562 vmaAllocations[i] = allocations[i].m_Allocation;
1563
1564 const size_t nonMovablePercent = 0;
1565 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1566 for(size_t i = 0; i < nonMovableCount; ++i)
1567 {
1568 size_t index = (size_t)rand() % vmaAllocations.size();
1569 vmaAllocations.erase(vmaAllocations.begin() + index);
1570 }
1571
1572 const uint32_t defragCount = 1;
1573 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1574 {
1575 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1576
1577 VmaDefragmentationInfo defragmentationInfo;
1578 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1579 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1580
1581 wprintf(L"Defragmentation #%u\n", defragIndex);
1582
1583 time_point begTime = std::chrono::high_resolution_clock::now();
1584
1585 VmaDefragmentationStats stats;
1586 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001587 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001588
1589 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1590
1591 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1592 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1593 wprintf(L"Time: %.2f s\n", defragmentDuration);
1594
1595 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1596 {
1597 if(allocationsChanged[i])
1598 {
1599 RecreateAllocationResource(allocations[i]);
1600 }
1601 }
1602
1603 for(size_t i = 0; i < allocations.size(); ++i)
1604 ValidateAllocationData(allocations[i]);
1605
Adam Sawicki0667e332018-08-24 17:26:44 +02001606 //wchar_t fileName[MAX_PATH];
1607 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1608 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001609 }
1610 }
1611
1612 // Destroy all remaining allocations.
1613 DestroyAllAllocations(allocations);
1614}
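
// Illustrative sketch (not part of the test suite): the general pattern for handling an
// allocation that vmaDefragment reported as changed. The allocation's contents are moved by
// the defragmentation itself; only the Vulkan buffer object has to be recreated and bound to
// the new location, as the RecreateAllocationResource helper used above does.
// ExampleRecreateBuffer is a hypothetical name; it assumes vmaBindBufferMemory is available,
// as in recent VMA versions.
static void ExampleRecreateBuffer(const VkBufferCreateInfo& bufCreateInfo, VmaAllocation alloc, VkBuffer& buf)
{
    // Destroy the buffer that still points at the old memory location.
    vkDestroyBuffer(g_hDevice, buf, g_Allocs);
    buf = VK_NULL_HANDLE;

    // Create a fresh buffer object...
    VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
    TEST(res == VK_SUCCESS);

    // ...and bind it to the allocation, which now lives at its new memory block/offset.
    res = vmaBindBufferMemory(g_hAllocator, alloc, buf);
    TEST(res == VK_SUCCESS);
}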
1615
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001616static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001617{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001618 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001619 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001620
1621 std::vector<AllocInfo> allocations;
1622
1623    // Create enough allocations to be sure to fill 3 new blocks of 256 MB each.
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1625 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001626 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001627 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1628 const size_t percentToLeave = 30;
1629 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001630 RandomNumberGenerator rand = { 234522 };
1631
1632 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001633
1634 VmaAllocationCreateInfo allocCreateInfo = {};
1635 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001636 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001637
1638 // Create all intended buffers.
1639 for(size_t i = 0; i < bufCount; ++i)
1640 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001641 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1642
1643 if(rand.Generate() % 100 < percentNonMovable)
1644 {
1645 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1646 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1647 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1648 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1649 }
1650 else
1651 {
1652 // Different usage just to see different color in output from VmaDumpVis.
1653 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1654 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1655 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1656 // And in JSON dump.
1657 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1658 }
1659
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001660 AllocInfo alloc;
1661 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1662 alloc.m_StartValue = rand.Generate();
1663 allocations.push_back(alloc);
1664 }
1665
1666 // Destroy some percentage of them.
1667 {
1668 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1669 for(size_t i = 0; i < buffersToDestroy; ++i)
1670 {
1671 const size_t index = rand.Generate() % allocations.size();
1672 allocations[index].Destroy();
1673 allocations.erase(allocations.begin() + index);
1674 }
1675 }
1676
1677 // Fill them with meaningful data.
1678 UploadGpuData(allocations.data(), allocations.size());
1679
Adam Sawickic6ede152018-11-16 17:04:14 +01001680 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001681 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001682 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001683
1684 // Defragment using GPU only.
1685 {
1686 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001687
Adam Sawickic6ede152018-11-16 17:04:14 +01001688 std::vector<VmaAllocation> allocationPtrs;
1689 std::vector<VkBool32> allocationChanged;
1690 std::vector<size_t> allocationOriginalIndex;
1691
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001692 for(size_t i = 0; i < allocCount; ++i)
1693 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001694 VmaAllocationInfo allocInfo = {};
1695 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1696 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1697 {
1698 allocationPtrs.push_back(allocations[i].m_Allocation);
1699 allocationChanged.push_back(VK_FALSE);
1700 allocationOriginalIndex.push_back(i);
1701 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001702 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001703
1704 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001705
1706 BeginSingleTimeCommands();
1707
1708 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001709 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001710 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001711 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001712 defragInfo.pAllocationsChanged = allocationChanged.data();
1713 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001714 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1715 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1716
1717 VmaDefragmentationStats stats = {};
1718 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1719 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1720 TEST(res >= VK_SUCCESS);
1721
1722 EndSingleTimeCommands();
1723
1724 vmaDefragmentationEnd(g_hAllocator, ctx);
1725
Adam Sawickic6ede152018-11-16 17:04:14 +01001726 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001727 {
1728 if(allocationChanged[i])
1729 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001730 const size_t origAllocIndex = allocationOriginalIndex[i];
1731 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001732 }
1733 }
1734
Adam Sawicki4d844e22019-01-24 16:21:05 +01001735 // If corruption detection is enabled, GPU defragmentation may not work on
1736 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001737 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001738 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1739 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001740 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001741 }
1742
1743 ValidateGpuData(allocations.data(), allocations.size());
1744
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001745 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001746 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001747
1748 // Destroy all remaining buffers.
1749 for(size_t i = allocations.size(); i--; )
1750 {
1751 allocations[i].Destroy();
1752 }
Adam Sawicki05704002018-11-08 16:07:29 +01001753
1754 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001755}
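
// Illustrative sketch (not part of the test suite): the same VmaDefragmentationInfo2 API used
// above can also run purely on the CPU side for host-visible memory, by leaving commandBuffer
// null and filling the maxCpu* limits instead of the maxGpu* ones. ExampleDefragmentCpu is a
// hypothetical helper; it assumes the allocations passed in are host-visible and not in use.
static void ExampleDefragmentCpu(VmaAllocation* allocs, uint32_t allocCount)
{
    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;     // No limit on bytes moved with memcpy.
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;  // No limit on number of allocations moved.
    defragInfo.commandBuffer = VK_NULL_HANDLE;        // No GPU path requested.

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
    TEST(res >= VK_SUCCESS);
    vmaDefragmentationEnd(g_hAllocator, ctx);
    // Buffers bound to moved allocations must still be recreated and rebound by the caller.
}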
1756
Adam Sawickib8333fb2018-03-13 16:15:53 +01001757static void TestUserData()
1758{
1759 VkResult res;
1760
1761 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1762 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1763 bufCreateInfo.size = 0x10000;
1764
1765 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1766 {
1767 // Opaque pointer
1768 {
1769
1770 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1771 void* pointerToSomething = &res;
1772
1773 VmaAllocationCreateInfo allocCreateInfo = {};
1774 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1775 allocCreateInfo.pUserData = numberAsPointer;
1776 if(testIndex == 1)
1777 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1778
1779 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1780 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001781 TEST(res == VK_SUCCESS);
1782            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001783
1784 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001785 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001786
1787 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1788 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001789 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001790
1791 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1792 }
1793
1794 // String
1795 {
1796 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1797 const char* name2 = "2";
1798 const size_t name1Len = strlen(name1);
1799
1800 char* name1Buf = new char[name1Len + 1];
1801 strcpy_s(name1Buf, name1Len + 1, name1);
1802
1803 VmaAllocationCreateInfo allocCreateInfo = {};
1804 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1805 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1806 allocCreateInfo.pUserData = name1Buf;
1807 if(testIndex == 1)
1808 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1809
1810 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1811 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001812 TEST(res == VK_SUCCESS);
1813 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1814 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001815
1816 delete[] name1Buf;
1817
1818 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001819 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001820
1821 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1822 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001823 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001824
1825 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1826 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001827 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001828
1829 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1830 }
1831 }
1832}
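
// Illustrative sketch (not part of the test suite): how an application would typically use
// the string variant of pUserData tested above - attach a debug name at creation time and
// read it back later, e.g. when logging statistics or JSON dumps. ExampleCreateNamedBuffer
// is a hypothetical helper; the buffer usage flags are arbitrary.
static VkResult ExampleCreateNamedBuffer(VkDeviceSize size, const char* debugName, VkBuffer& outBuf, VmaAllocation& outAlloc)
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = size;
    bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    // The string is copied internally, so a temporary pointer is fine here.
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = (void*)debugName;

    return vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outBuf, &outAlloc, nullptr);
}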
1833
Adam Sawicki370ab182018-11-08 16:31:00 +01001834static void TestInvalidAllocations()
1835{
1836 VkResult res;
1837
1838 VmaAllocationCreateInfo allocCreateInfo = {};
1839 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1840
1841 // Try to allocate 0 bytes.
1842 {
1843 VkMemoryRequirements memReq = {};
1844 memReq.size = 0; // !!!
1845 memReq.alignment = 4;
1846 memReq.memoryTypeBits = UINT32_MAX;
1847 VmaAllocation alloc = VK_NULL_HANDLE;
1848 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1849 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1850 }
1851
1852 // Try to create buffer with size = 0.
1853 {
1854 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1855 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1856 bufCreateInfo.size = 0; // !!!
1857 VkBuffer buf = VK_NULL_HANDLE;
1858 VmaAllocation alloc = VK_NULL_HANDLE;
1859 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1860 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1861 }
1862
1863 // Try to create image with one dimension = 0.
1864 {
1865        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1866 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1867 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1868 imageCreateInfo.extent.width = 128;
1869 imageCreateInfo.extent.height = 0; // !!!
1870 imageCreateInfo.extent.depth = 1;
1871 imageCreateInfo.mipLevels = 1;
1872 imageCreateInfo.arrayLayers = 1;
1873 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1874 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1875 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1876 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1877 VkImage image = VK_NULL_HANDLE;
1878 VmaAllocation alloc = VK_NULL_HANDLE;
1879 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1880 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1881 }
1882}
1883
Adam Sawickib8333fb2018-03-13 16:15:53 +01001884static void TestMemoryRequirements()
1885{
1886 VkResult res;
1887 VkBuffer buf;
1888 VmaAllocation alloc;
1889 VmaAllocationInfo allocInfo;
1890
1891 const VkPhysicalDeviceMemoryProperties* memProps;
1892 vmaGetMemoryProperties(g_hAllocator, &memProps);
1893
1894 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1895 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1896 bufInfo.size = 128;
1897
1898 VmaAllocationCreateInfo allocCreateInfo = {};
1899
1900 // No requirements.
1901 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001902 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001903 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1904
1905 // Usage.
1906 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1907 allocCreateInfo.requiredFlags = 0;
1908 allocCreateInfo.preferredFlags = 0;
1909 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1910
1911 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001912 TEST(res == VK_SUCCESS);
1913 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001914 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1915
1916 // Required flags, preferred flags.
1917 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1918 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1919 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1920 allocCreateInfo.memoryTypeBits = 0;
1921
1922 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001923 TEST(res == VK_SUCCESS);
1924 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1925 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001926 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1927
1928 // memoryTypeBits.
1929 const uint32_t memType = allocInfo.memoryType;
1930 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1931 allocCreateInfo.requiredFlags = 0;
1932 allocCreateInfo.preferredFlags = 0;
1933 allocCreateInfo.memoryTypeBits = 1u << memType;
1934
1935 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001936 TEST(res == VK_SUCCESS);
1937 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001938 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1939
1940}
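
// Illustrative sketch (not part of the test suite): a practical combination of the fields
// exercised above - a readback buffer that must be host-visible and coherent, and preferably
// also cached for faster CPU reads. ExampleReadbackAllocCreateInfo is a hypothetical helper.
static VmaAllocationCreateInfo ExampleReadbackAllocCreateInfo()
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
    allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT; // Nice to have, not mandatory.
    allocCreateInfo.memoryTypeBits = UINT32_MAX; // Do not exclude any memory type.
    return allocCreateInfo;
}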
1941
1942static void TestBasics()
1943{
1944 VkResult res;
1945
1946 TestMemoryRequirements();
1947
1948 // Lost allocation
1949 {
1950 VmaAllocation alloc = VK_NULL_HANDLE;
1951 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001952 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001953
1954 VmaAllocationInfo allocInfo;
1955 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001956 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1957 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001958
1959 vmaFreeMemory(g_hAllocator, alloc);
1960 }
1961
1962 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1963 {
1964 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1965 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1966 bufCreateInfo.size = 128;
1967
1968 VmaAllocationCreateInfo allocCreateInfo = {};
1969 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1970 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1971
1972 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1973 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001974 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001975
1976 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1977
1978 // Same with OWN_MEMORY.
1979 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1980
1981 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001982 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001983
1984 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1985 }
1986
1987 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001988
1989 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001990}
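
// Illustrative sketch (not part of the test suite): the MAPPED flag combined with GPU_ONLY,
// as in TestBasics above, only yields a persistently mapped pointer if the chosen memory type
// happens to be HOST_VISIBLE, so pMappedData has to be checked before writing through it.
// ExampleTryWriteMapped is a hypothetical helper.
static bool ExampleTryWriteMapped(const VmaAllocationInfo& allocInfo, const void* srcData, size_t srcSize)
{
    if(allocInfo.pMappedData == nullptr)
        return false; // Memory type is not HOST_VISIBLE - a staging copy is needed instead.
    memcpy(allocInfo.pMappedData, srcData, srcSize);
    return true;
}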
1991
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01001992static void TestPool_MinBlockCount()
1993{
1994#if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
1995 return;
1996#endif
1997
1998 wprintf(L"Test Pool MinBlockCount\n");
1999 VkResult res;
2000
2001 static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
2002 static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
2003
2004 VmaAllocationCreateInfo allocCreateInfo = {};
2005 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
2006
2007 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2008 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2009 bufCreateInfo.size = ALLOC_SIZE;
2010
2011 VmaPoolCreateInfo poolCreateInfo = {};
2012 poolCreateInfo.blockSize = BLOCK_SIZE;
2013 poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
2014 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
2015 TEST(res == VK_SUCCESS);
2016
2017 VmaPool pool = VK_NULL_HANDLE;
2018 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
2019 TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
2020
2021 // Check that there are 2 blocks preallocated as requested.
2022 VmaPoolStats begPoolStats = {};
2023 vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
2024 TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
2025
2026 // Allocate 5 buffers to create 3 blocks.
2027 static const uint32_t BUF_COUNT = 5;
2028 allocCreateInfo.pool = pool;
2029 std::vector<AllocInfo> allocs(BUF_COUNT);
2030 for(uint32_t i = 0; i < BUF_COUNT; ++i)
2031 {
2032 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
2033 TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
2034 }
2035
2036 // Check that there are really 3 blocks.
2037 VmaPoolStats poolStats2 = {};
2038 vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
2039 TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
2040
2041    // Free the first two allocations to make one block empty.
2042 allocs[0].Destroy();
2043 allocs[1].Destroy();
2044
2045 // Check that there are still 3 blocks due to hysteresis.
2046 VmaPoolStats poolStats3 = {};
2047 vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
2048    TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats3.size == BLOCK_SIZE * 3);
2049
2050    // Free the last allocation to make a second block empty.
2051 allocs[BUF_COUNT - 1].Destroy();
2052
2053 // Check that there are now 2 blocks only.
2054 VmaPoolStats poolStats4 = {};
2055 vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
2056 TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
2057
2058 // Cleanup.
2059 for(size_t i = allocs.size(); i--; )
2060 {
2061 allocs[i].Destroy();
2062 }
2063 vmaDestroyPool(g_hAllocator, pool);
2064}
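
// Illustrative sketch (not part of the test suite): the usual steps for setting up a custom
// pool like the one above - pick the memory type from a representative buffer description,
// then fix the block size and block-count limits. ExampleCreateFixedPool is a hypothetical
// helper; the 16 MB / 4-block numbers are arbitrary.
static VmaPool ExampleCreateFixedPool()
{
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);
    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // Fixed 16 MB blocks.
    poolCreateInfo.minBlockCount = 1;               // Keep at least one block alive.
    poolCreateInfo.maxBlockCount = 4;               // Never grow beyond 64 MB total.

    VmaPool pool = VK_NULL_HANDLE;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);
    return pool;
}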
2065
Adam Sawickib8333fb2018-03-13 16:15:53 +01002066void TestHeapSizeLimit()
2067{
Adam Sawickib3f51102019-11-18 13:05:56 +01002068 const VkDeviceSize HEAP_SIZE_LIMIT = 200ull * 1024 * 1024; // 200 MB
2069 const VkDeviceSize BLOCK_SIZE = 20ull * 1024 * 1024; // 20 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01002070
2071 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
2072 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
2073 {
2074 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
2075 }
2076
2077 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2078 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2079 allocatorCreateInfo.device = g_hDevice;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002080 allocatorCreateInfo.instance = g_hVulkanInstance;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002081 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2082
2083 VmaAllocator hAllocator;
2084 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002085 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002086
2087 struct Item
2088 {
2089 VkBuffer hBuf;
2090 VmaAllocation hAlloc;
2091 };
2092 std::vector<Item> items;
2093
2094 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2095 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2096
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002097 // 1. Allocate two blocks of dedicated memory, half the size of BLOCK_SIZE.
2098 VmaAllocationInfo dedicatedAllocInfo;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002099 {
2100 VmaAllocationCreateInfo allocCreateInfo = {};
2101 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2102 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2103
2104 bufCreateInfo.size = BLOCK_SIZE / 2;
2105
2106 for(size_t i = 0; i < 2; ++i)
2107 {
2108 Item item;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002109 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002110 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002111 items.push_back(item);
2112 }
2113 }
2114
2115 // Create pool to make sure allocations must be out of this memory type.
2116 VmaPoolCreateInfo poolCreateInfo = {};
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002117 poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002118 poolCreateInfo.blockSize = BLOCK_SIZE;
2119
2120 VmaPool hPool;
2121 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002122 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002123
2124 // 2. Allocate normal buffers from all the remaining memory.
2125 {
2126 VmaAllocationCreateInfo allocCreateInfo = {};
2127 allocCreateInfo.pool = hPool;
2128
2129 bufCreateInfo.size = BLOCK_SIZE / 2;
2130
2131 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2132 for(size_t i = 0; i < bufCount; ++i)
2133 {
2134 Item item;
2135 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002136 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002137 items.push_back(item);
2138 }
2139 }
2140
2141 // 3. Allocation of one more (even small) buffer should fail.
2142 {
2143 VmaAllocationCreateInfo allocCreateInfo = {};
2144 allocCreateInfo.pool = hPool;
2145
2146 bufCreateInfo.size = 128;
2147
2148 VkBuffer hBuf;
2149 VmaAllocation hAlloc;
2150 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002151 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002152 }
2153
2154 // Destroy everything.
2155 for(size_t i = items.size(); i--; )
2156 {
2157 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2158 }
2159
2160 vmaDestroyPool(hAllocator, hPool);
2161
2162 vmaDestroyAllocator(hAllocator);
2163}
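
// Illustrative sketch (not part of the test suite): pHeapSizeLimit, as used above, can also
// cap just a single heap while leaving the others unrestricted by passing VK_WHOLE_SIZE for
// the remaining entries. ExampleCreateLimitedAllocator and the 256 MB figure are hypothetical.
static VmaAllocator ExampleCreateLimitedAllocator(uint32_t heapIndexToLimit)
{
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE; // No limit on this heap.
    heapSizeLimit[heapIndexToLimit] = 256ull * 1024 * 1024; // Simulate a 256 MB heap.

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.instance = g_hVulkanInstance;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit; // Values are read during creation only.

    VmaAllocator allocator = VK_NULL_HANDLE;
    TEST(vmaCreateAllocator(&allocatorCreateInfo, &allocator) == VK_SUCCESS);
    return allocator;
}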
2164
Adam Sawicki212a4a62018-06-14 15:44:45 +02002165#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002166static void TestDebugMargin()
2167{
2168 if(VMA_DEBUG_MARGIN == 0)
2169 {
2170 return;
2171 }
2172
2173 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002174 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002175
2176 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002177 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002178
2179 // Create few buffers of different size.
2180 const size_t BUF_COUNT = 10;
2181 BufferInfo buffers[BUF_COUNT];
2182 VmaAllocationInfo allocInfo[BUF_COUNT];
2183    for(size_t i = 0; i < BUF_COUNT; ++i)
2184 {
2185 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002186 // Last one will be mapped.
2187 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002188
2189 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002190 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002191 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002192 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002193
2194 if(i == BUF_COUNT - 1)
2195 {
2196 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002197 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002198 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2199 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2200 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002201 }
2202
2203 // Check if their offsets preserve margin between them.
2204 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2205 {
2206 if(lhs.deviceMemory != rhs.deviceMemory)
2207 {
2208 return lhs.deviceMemory < rhs.deviceMemory;
2209 }
2210 return lhs.offset < rhs.offset;
2211 });
2212 for(size_t i = 1; i < BUF_COUNT; ++i)
2213 {
2214 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2215 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002216 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002217 }
2218 }
2219
Adam Sawicki212a4a62018-06-14 15:44:45 +02002220 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002221 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002222
Adam Sawicki73b16652018-06-11 16:39:25 +02002223 // Destroy all buffers.
2224 for(size_t i = BUF_COUNT; i--; )
2225 {
2226 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2227 }
2228}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002229#endif
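
// Illustrative sketch (not part of the test suite): vmaCheckCorruption, as called in
// TestDebugMargin above, only does useful work when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are defined when the VMA implementation is compiled (see the
// library documentation). A debug build can then validate all memory types periodically.
// ExampleCheckCorruptionEveryNFrames and its check period are hypothetical.
static void ExampleCheckCorruptionEveryNFrames(uint32_t frameIndex)
{
    const uint32_t checkPeriod = 60; // Arbitrary: roughly once per second at 60 FPS.
    if(frameIndex % checkPeriod != 0)
        return;
    const VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX); // All memory types.
    // VK_ERROR_FEATURE_NOT_PRESENT is expected when corruption detection is compiled out.
    TEST(res == VK_SUCCESS || res == VK_ERROR_FEATURE_NOT_PRESENT);
}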
Adam Sawicki73b16652018-06-11 16:39:25 +02002230
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002231static void TestLinearAllocator()
2232{
2233 wprintf(L"Test linear allocator\n");
2234
2235 RandomNumberGenerator rand{645332};
2236
2237 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2238 sampleBufCreateInfo.size = 1024; // Whatever.
2239 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2240
2241 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2242 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2243
2244 VmaPoolCreateInfo poolCreateInfo = {};
2245 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002246 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002247
Adam Sawickiee082772018-06-20 17:45:49 +02002248 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002249 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2250 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2251
2252 VmaPool pool = nullptr;
2253 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002254 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002255
2256 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2257
2258 VmaAllocationCreateInfo allocCreateInfo = {};
2259 allocCreateInfo.pool = pool;
2260
2261 constexpr size_t maxBufCount = 100;
2262 std::vector<BufferInfo> bufInfo;
2263
2264 constexpr VkDeviceSize bufSizeMin = 16;
2265 constexpr VkDeviceSize bufSizeMax = 1024;
2266 VmaAllocationInfo allocInfo;
2267 VkDeviceSize prevOffset = 0;
2268
2269 // Test one-time free.
2270 for(size_t i = 0; i < 2; ++i)
2271 {
2272        // Allocate a number of buffers of varying sizes that surely fit into this block.
2273 VkDeviceSize bufSumSize = 0;
2274 for(size_t i = 0; i < maxBufCount; ++i)
2275 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002276 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002277 BufferInfo newBufInfo;
2278 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2279 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002280 TEST(res == VK_SUCCESS);
2281 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002282 bufInfo.push_back(newBufInfo);
2283 prevOffset = allocInfo.offset;
2284 bufSumSize += bufCreateInfo.size;
2285 }
2286
2287 // Validate pool stats.
2288 VmaPoolStats stats;
2289 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002290 TEST(stats.size == poolCreateInfo.blockSize);
2291        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2292 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002293
2294 // Destroy the buffers in random order.
2295 while(!bufInfo.empty())
2296 {
2297 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2298 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2299 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2300 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2301 }
2302 }
2303
2304 // Test stack.
2305 {
2306        // Allocate a number of buffers of varying sizes that surely fit into this block.
2307 for(size_t i = 0; i < maxBufCount; ++i)
2308 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002309 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002310 BufferInfo newBufInfo;
2311 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2312 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002313 TEST(res == VK_SUCCESS);
2314 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002315 bufInfo.push_back(newBufInfo);
2316 prevOffset = allocInfo.offset;
2317 }
2318
2319        // Destroy a few buffers from the top of the stack.
2320 for(size_t i = 0; i < maxBufCount / 5; ++i)
2321 {
2322 const BufferInfo& currBufInfo = bufInfo.back();
2323 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2324 bufInfo.pop_back();
2325 }
2326
2327 // Create some more
2328 for(size_t i = 0; i < maxBufCount / 5; ++i)
2329 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002330 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002331 BufferInfo newBufInfo;
2332 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2333 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002334 TEST(res == VK_SUCCESS);
2335 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002336 bufInfo.push_back(newBufInfo);
2337 prevOffset = allocInfo.offset;
2338 }
2339
2340 // Destroy the buffers in reverse order.
2341 while(!bufInfo.empty())
2342 {
2343 const BufferInfo& currBufInfo = bufInfo.back();
2344 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2345 bufInfo.pop_back();
2346 }
2347 }
2348
Adam Sawickiee082772018-06-20 17:45:49 +02002349 // Test ring buffer.
2350 {
2351        // Allocate a number of buffers that surely fit into this block.
2352 bufCreateInfo.size = bufSizeMax;
2353 for(size_t i = 0; i < maxBufCount; ++i)
2354 {
2355 BufferInfo newBufInfo;
2356 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2357 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002358 TEST(res == VK_SUCCESS);
2359 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002360 bufInfo.push_back(newBufInfo);
2361 prevOffset = allocInfo.offset;
2362 }
2363
2364 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2365 const size_t buffersPerIter = maxBufCount / 10 - 1;
2366 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2367 for(size_t iter = 0; iter < iterCount; ++iter)
2368 {
2369 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2370 {
2371 const BufferInfo& currBufInfo = bufInfo.front();
2372 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2373 bufInfo.erase(bufInfo.begin());
2374 }
2375 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2376 {
2377 BufferInfo newBufInfo;
2378 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2379 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002380 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002381 bufInfo.push_back(newBufInfo);
2382 }
2383 }
2384
2385 // Allocate buffers until we reach out-of-memory.
2386 uint32_t debugIndex = 0;
2387 while(res == VK_SUCCESS)
2388 {
2389 BufferInfo newBufInfo;
2390 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2391 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2392 if(res == VK_SUCCESS)
2393 {
2394 bufInfo.push_back(newBufInfo);
2395 }
2396 else
2397 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002398 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002399 }
2400 ++debugIndex;
2401 }
2402
2403 // Destroy the buffers in random order.
2404 while(!bufInfo.empty())
2405 {
2406 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2407 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2408 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2409 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2410 }
2411 }
2412
Adam Sawicki680b2252018-08-22 14:47:32 +02002413 // Test double stack.
2414 {
2415        // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2416 VkDeviceSize prevOffsetLower = 0;
2417 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2418 for(size_t i = 0; i < maxBufCount; ++i)
2419 {
2420 const bool upperAddress = (i % 2) != 0;
2421 if(upperAddress)
2422 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2423 else
2424 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002425 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002426 BufferInfo newBufInfo;
2427 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2428 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002429 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002430 if(upperAddress)
2431 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002432 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002433 prevOffsetUpper = allocInfo.offset;
2434 }
2435 else
2436 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002437 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002438 prevOffsetLower = allocInfo.offset;
2439 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002440 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002441 bufInfo.push_back(newBufInfo);
2442 }
2443
2444        // Destroy a few buffers from the top of the stack.
2445 for(size_t i = 0; i < maxBufCount / 5; ++i)
2446 {
2447 const BufferInfo& currBufInfo = bufInfo.back();
2448 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2449 bufInfo.pop_back();
2450 }
2451
2452 // Create some more
2453 for(size_t i = 0; i < maxBufCount / 5; ++i)
2454 {
2455 const bool upperAddress = (i % 2) != 0;
2456 if(upperAddress)
2457 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2458 else
2459 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002460 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002461 BufferInfo newBufInfo;
2462 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2463 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002464 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002465 bufInfo.push_back(newBufInfo);
2466 }
2467
2468 // Destroy the buffers in reverse order.
2469 while(!bufInfo.empty())
2470 {
2471 const BufferInfo& currBufInfo = bufInfo.back();
2472 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2473 bufInfo.pop_back();
2474 }
2475
2476 // Create buffers on both sides until we reach out of memory.
2477 prevOffsetLower = 0;
2478 prevOffsetUpper = poolCreateInfo.blockSize;
2479 res = VK_SUCCESS;
2480 for(size_t i = 0; res == VK_SUCCESS; ++i)
2481 {
2482 const bool upperAddress = (i % 2) != 0;
2483 if(upperAddress)
2484 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2485 else
2486 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002487 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002488 BufferInfo newBufInfo;
2489 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2490 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2491 if(res == VK_SUCCESS)
2492 {
2493 if(upperAddress)
2494 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002495 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002496 prevOffsetUpper = allocInfo.offset;
2497 }
2498 else
2499 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002500 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002501 prevOffsetLower = allocInfo.offset;
2502 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002503 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002504 bufInfo.push_back(newBufInfo);
2505 }
2506 }
2507
2508 // Destroy the buffers in random order.
2509 while(!bufInfo.empty())
2510 {
2511 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2512 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2513 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2514 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2515 }
2516
2517 // Create buffers on upper side only, constant size, until we reach out of memory.
2518 prevOffsetUpper = poolCreateInfo.blockSize;
2519 res = VK_SUCCESS;
2520 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2521 bufCreateInfo.size = bufSizeMax;
2522 for(size_t i = 0; res == VK_SUCCESS; ++i)
2523 {
2524 BufferInfo newBufInfo;
2525 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2526 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2527 if(res == VK_SUCCESS)
2528 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002529 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002530 prevOffsetUpper = allocInfo.offset;
2531 bufInfo.push_back(newBufInfo);
2532 }
2533 }
2534
2535 // Destroy the buffers in reverse order.
2536 while(!bufInfo.empty())
2537 {
2538 const BufferInfo& currBufInfo = bufInfo.back();
2539 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2540 bufInfo.pop_back();
2541 }
2542 }
2543
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002544 // Test ring buffer with lost allocations.
2545 {
2546        // Allocate a number of buffers until the pool is full.
2547 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2548 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2549 res = VK_SUCCESS;
2550 for(size_t i = 0; res == VK_SUCCESS; ++i)
2551 {
2552 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2553
Adam Sawickifd366b62019-01-24 15:26:43 +01002554 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002555
2556 BufferInfo newBufInfo;
2557 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2558 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2559 if(res == VK_SUCCESS)
2560 bufInfo.push_back(newBufInfo);
2561 }
2562
2563 // Free first half of it.
2564 {
2565 const size_t buffersToDelete = bufInfo.size() / 2;
2566 for(size_t i = 0; i < buffersToDelete; ++i)
2567 {
2568 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2569 }
2570 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2571 }
2572
2573        // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002574        // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002575 res = VK_SUCCESS;
2576 for(size_t i = 0; res == VK_SUCCESS; ++i)
2577 {
2578 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2579
Adam Sawickifd366b62019-01-24 15:26:43 +01002580 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002581
2582 BufferInfo newBufInfo;
2583 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2584 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2585 if(res == VK_SUCCESS)
2586 bufInfo.push_back(newBufInfo);
2587 }
2588
2589 VkDeviceSize firstNewOffset;
2590 {
2591 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2592
2593 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2594 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2595 bufCreateInfo.size = bufSizeMax;
2596
2597 BufferInfo newBufInfo;
2598 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2599 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002600 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002601 bufInfo.push_back(newBufInfo);
2602 firstNewOffset = allocInfo.offset;
2603
2604 // Make sure at least one buffer from the beginning became lost.
2605 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002606 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002607 }
2608
Adam Sawickifd366b62019-01-24 15:26:43 +01002609#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002610 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2611 size_t newCount = 1;
2612 for(;;)
2613 {
2614 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2615
Adam Sawickifd366b62019-01-24 15:26:43 +01002616 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002617
2618 BufferInfo newBufInfo;
2619 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2620 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002621
Adam Sawickib8d34d52018-10-03 17:41:20 +02002622 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002623 bufInfo.push_back(newBufInfo);
2624 ++newCount;
2625 if(allocInfo.offset < firstNewOffset)
2626 break;
2627 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002628#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002629
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002630 // Delete buffers that are lost.
2631 for(size_t i = bufInfo.size(); i--; )
2632 {
2633 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2634 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2635 {
2636 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2637 bufInfo.erase(bufInfo.begin() + i);
2638 }
2639 }
2640
2641 // Test vmaMakePoolAllocationsLost
2642 {
2643 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2644
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002645 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002646 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002647 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002648
2649 size_t realLostAllocCount = 0;
2650 for(size_t i = 0; i < bufInfo.size(); ++i)
2651 {
2652 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2653 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2654 ++realLostAllocCount;
2655 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002656 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002657 }
2658
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002659 // Destroy all the buffers in forward order.
2660 for(size_t i = 0; i < bufInfo.size(); ++i)
2661 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2662 bufInfo.clear();
2663 }
2664
Adam Sawicki70a683e2018-08-24 15:36:32 +02002665 vmaDestroyPool(g_hAllocator, pool);
2666}
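
// Illustrative sketch (not part of the test suite): a common real-world use of
// VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT exercised above is a single-block pool used as a
// per-frame stack for short-lived staging buffers - allocate during the frame, then free
// everything in reverse (LIFO) order at the end of it. ExampleCreatePerFrameStagingPool is a
// hypothetical helper; the 64 MB block size is arbitrary.
static VmaPool ExampleCreatePerFrameStagingPool()
{
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT; // Allocations behave like a stack/ring buffer.
    poolCreateInfo.blockSize = 64ull * 1024 * 1024;              // One fixed block.
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = VK_NULL_HANDLE;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);
    return pool;
}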
Adam Sawickif799c4f2018-08-23 10:40:30 +02002667
Adam Sawicki70a683e2018-08-24 15:36:32 +02002668static void TestLinearAllocatorMultiBlock()
2669{
2670 wprintf(L"Test linear allocator multi block\n");
2671
2672 RandomNumberGenerator rand{345673};
2673
2674 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2675 sampleBufCreateInfo.size = 1024 * 1024;
2676 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2677
2678 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2679 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2680
2681 VmaPoolCreateInfo poolCreateInfo = {};
2682 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2683 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002684 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002685
2686 VmaPool pool = nullptr;
2687 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002688 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002689
2690 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2691
2692 VmaAllocationCreateInfo allocCreateInfo = {};
2693 allocCreateInfo.pool = pool;
2694
2695 std::vector<BufferInfo> bufInfo;
2696 VmaAllocationInfo allocInfo;
2697
2698 // Test one-time free.
2699 {
2700 // Allocate buffers until we move to a second block.
2701 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2702 for(uint32_t i = 0; ; ++i)
2703 {
2704 BufferInfo newBufInfo;
2705 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2706 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002707 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002708 bufInfo.push_back(newBufInfo);
2709 if(lastMem && allocInfo.deviceMemory != lastMem)
2710 {
2711 break;
2712 }
2713 lastMem = allocInfo.deviceMemory;
2714 }
2715
Adam Sawickib8d34d52018-10-03 17:41:20 +02002716 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002717
2718 // Make sure that pool has now two blocks.
2719 VmaPoolStats poolStats = {};
2720 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002721 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002722
2723 // Destroy all the buffers in random order.
2724 while(!bufInfo.empty())
2725 {
2726 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2727 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2728 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2729 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2730 }
2731
2732 // Make sure that pool has now at most one block.
2733 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002734 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002735 }
2736
2737 // Test stack.
2738 {
2739 // Allocate buffers until we move to a second block.
2740 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2741 for(uint32_t i = 0; ; ++i)
2742 {
2743 BufferInfo newBufInfo;
2744 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2745 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002746 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002747 bufInfo.push_back(newBufInfo);
2748 if(lastMem && allocInfo.deviceMemory != lastMem)
2749 {
2750 break;
2751 }
2752 lastMem = allocInfo.deviceMemory;
2753 }
2754
Adam Sawickib8d34d52018-10-03 17:41:20 +02002755 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002756
2757        // Add a few more buffers.
2758 for(uint32_t i = 0; i < 5; ++i)
2759 {
2760 BufferInfo newBufInfo;
2761 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2762 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002763 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002764 bufInfo.push_back(newBufInfo);
2765 }
2766
2767 // Make sure that pool has now two blocks.
2768 VmaPoolStats poolStats = {};
2769 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002770 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002771
2772 // Delete half of buffers, LIFO.
2773 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2774 {
2775 const BufferInfo& currBufInfo = bufInfo.back();
2776 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2777 bufInfo.pop_back();
2778 }
2779
2780 // Add one more buffer.
2781 BufferInfo newBufInfo;
2782 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2783 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002784 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002785 bufInfo.push_back(newBufInfo);
2786
2787 // Make sure that pool has now one block.
2788 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002789 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002790
2791 // Delete all the remaining buffers, LIFO.
2792 while(!bufInfo.empty())
2793 {
2794 const BufferInfo& currBufInfo = bufInfo.back();
2795 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2796 bufInfo.pop_back();
2797 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002798 }
2799
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002800 vmaDestroyPool(g_hAllocator, pool);
2801}
2802
Adam Sawickifd11d752018-08-22 15:02:10 +02002803static void ManuallyTestLinearAllocator()
2804{
2805 VmaStats origStats;
2806 vmaCalculateStats(g_hAllocator, &origStats);
2807
2808 wprintf(L"Manually test linear allocator\n");
2809
2810 RandomNumberGenerator rand{645332};
2811
2812 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2813 sampleBufCreateInfo.size = 1024; // Whatever.
2814 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2815
2816 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2817 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2818
2819 VmaPoolCreateInfo poolCreateInfo = {};
2820 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002821 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002822
2823 poolCreateInfo.blockSize = 10 * 1024;
2824 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2825 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2826
2827 VmaPool pool = nullptr;
2828 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002829 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002830
2831 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2832
2833 VmaAllocationCreateInfo allocCreateInfo = {};
2834 allocCreateInfo.pool = pool;
2835
2836 std::vector<BufferInfo> bufInfo;
2837 VmaAllocationInfo allocInfo;
2838 BufferInfo newBufInfo;
2839
2840 // Test double stack.
2841 {
2842 /*
2843 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2844 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2845
2846 Totally:
2847 1 block allocated
2848 10240 Vulkan bytes
2849 6 new allocations
2850 2256 bytes in allocations
2851 */
2852
2853 bufCreateInfo.size = 32;
2854 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2855 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002856 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002857 bufInfo.push_back(newBufInfo);
2858
2859 bufCreateInfo.size = 1024;
2860 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2861 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002862 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002863 bufInfo.push_back(newBufInfo);
2864
2865 bufCreateInfo.size = 32;
2866 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2867 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002868 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002869 bufInfo.push_back(newBufInfo);
2870
2871 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2872
2873 bufCreateInfo.size = 128;
2874 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2875 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002876 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002877 bufInfo.push_back(newBufInfo);
2878
2879 bufCreateInfo.size = 1024;
2880 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2881 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002882 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002883 bufInfo.push_back(newBufInfo);
2884
2885 bufCreateInfo.size = 16;
2886 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2887 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002888 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002889 bufInfo.push_back(newBufInfo);
2890
2891 VmaStats currStats;
2892 vmaCalculateStats(g_hAllocator, &currStats);
2893 VmaPoolStats poolStats;
2894 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2895
2896 char* statsStr = nullptr;
2897 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2898
2899 // PUT BREAKPOINT HERE TO CHECK.
2900 // Inspect: currStats versus origStats, poolStats, statsStr.
2901 int I = 0;
2902
2903 vmaFreeStatsString(g_hAllocator, statsStr);
2904
2905 // Destroy the buffers in reverse order.
2906 while(!bufInfo.empty())
2907 {
2908 const BufferInfo& currBufInfo = bufInfo.back();
2909 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2910 bufInfo.pop_back();
2911 }
2912 }
2913
2914 vmaDestroyPool(g_hAllocator, pool);
2915}
2916
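// Benchmarks one combination of (algorithm, empty/non-empty pool, allocation strategy,
// free order): optionally pre-fills the pool to about 1/3 of its size and randomly frees
// half of that, then repeatedly allocates and frees allocCount allocations of random
// size, accumulating total allocation and deallocation time over iterationCount rounds.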
Adam Sawicki80927152018-09-07 17:27:23 +02002917static void BenchmarkAlgorithmsCase(FILE* file,
2918 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002919 bool empty,
2920 VmaAllocationCreateFlags allocStrategy,
2921 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002922{
2923 RandomNumberGenerator rand{16223};
2924
2925 const VkDeviceSize bufSizeMin = 32;
2926 const VkDeviceSize bufSizeMax = 1024;
2927 const size_t maxBufCapacity = 10000;
2928 const uint32_t iterationCount = 10;
2929
2930 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2931 sampleBufCreateInfo.size = bufSizeMax;
2932 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2933
2934 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2935 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2936
2937 VmaPoolCreateInfo poolCreateInfo = {};
2938 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002939 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002940
2941 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002942 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002943 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2944
2945 VmaPool pool = nullptr;
2946 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002947 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002948
2949 // Buffer created just to get memory requirements. Never bound to any memory.
2950 VkBuffer dummyBuffer = VK_NULL_HANDLE;
Adam Sawicki1f84f622019-07-02 13:40:01 +02002951 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002952 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002953
2954 VkMemoryRequirements memReq = {};
2955 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2956
Adam Sawicki1f84f622019-07-02 13:40:01 +02002957 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawicki0a607132018-08-24 11:18:41 +02002958
2959 VmaAllocationCreateInfo allocCreateInfo = {};
2960 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002961 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002962
2963 VmaAllocation alloc;
2964 std::vector<VmaAllocation> baseAllocations;
2965
2966 if(!empty)
2967 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002968 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002969 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002970 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002971 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002972 // This test intentionally allows sizes that are not aligned to 4 or 16 bytes.
2973 // This is theoretically allowed and already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002974 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2975 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002976 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002977 baseAllocations.push_back(alloc);
2978 totalSize += memReq.size;
2979 }
2980
2981 // Delete half of them, chosen randomly.
2982 size_t allocsToDelete = baseAllocations.size() / 2;
2983 for(size_t i = 0; i < allocsToDelete; ++i)
2984 {
2985 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2986 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2987 baseAllocations.erase(baseAllocations.begin() + index);
2988 }
2989 }
2990
2991 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002992 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002993 std::vector<VmaAllocation> testAllocations;
2994 testAllocations.reserve(allocCount);
2995 duration allocTotalDuration = duration::zero();
2996 duration freeTotalDuration = duration::zero();
2997 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2998 {
2999 // Allocations
3000 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
3001 for(size_t i = 0; i < allocCount; ++i)
3002 {
3003 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
3004 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003005 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02003006 testAllocations.push_back(alloc);
3007 }
3008 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
3009
3010 // Deallocations
3011 switch(freeOrder)
3012 {
3013 case FREE_ORDER::FORWARD:
3014 // Leave testAllocations unchanged.
3015 break;
3016 case FREE_ORDER::BACKWARD:
3017 std::reverse(testAllocations.begin(), testAllocations.end());
3018 break;
3019 case FREE_ORDER::RANDOM:
3020 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
3021 break;
3022 default: assert(0);
3023 }
3024
3025 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
3026 for(size_t i = 0; i < allocCount; ++i)
3027 vmaFreeMemory(g_hAllocator, testAllocations[i]);
3028 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
3029
3030 testAllocations.clear();
3031 }
3032
3033 // Delete baseAllocations
3034 while(!baseAllocations.empty())
3035 {
3036 vmaFreeMemory(g_hAllocator, baseAllocations.back());
3037 baseAllocations.pop_back();
3038 }
3039
3040 vmaDestroyPool(g_hAllocator, pool);
3041
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003042 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
3043 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
3044
Adam Sawicki80927152018-09-07 17:27:23 +02003045 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
3046 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02003047 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003048 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02003049 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003050 allocTotalSeconds,
3051 freeTotalSeconds);
3052
3053 if(file)
3054 {
3055 std::string currTime;
3056 CurrentTimeToStr(currTime);
3057
Adam Sawicki80927152018-09-07 17:27:23 +02003058 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003059 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02003060 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003061 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003062 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003063 FREE_ORDER_NAMES[(uint32_t)freeOrder],
3064 allocTotalSeconds,
3065 freeTotalSeconds);
3066 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003067}
3068
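// Driver for BenchmarkAlgorithmsCase: iterates over free orders, empty/non-empty pools,
// the three algorithms (Default, Buddy, Linear) and, for the default algorithm only,
// the available allocation strategies, printing results and writing one CSV row per
// combination when a file is given.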
Adam Sawicki80927152018-09-07 17:27:23 +02003069static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02003070{
Adam Sawicki80927152018-09-07 17:27:23 +02003071 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02003072
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003073 if(file)
3074 {
3075 fprintf(file,
3076 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003077 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003078 "Allocation time (s),Deallocation time (s)\n");
3079 }
3080
Adam Sawicki0a607132018-08-24 11:18:41 +02003081 uint32_t freeOrderCount = 1;
3082 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3083 freeOrderCount = 3;
3084 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3085 freeOrderCount = 2;
3086
3087 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003088 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003089
3090 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3091 {
3092 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3093 switch(freeOrderIndex)
3094 {
3095 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3096 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3097 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3098 default: assert(0);
3099 }
3100
3101 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3102 {
Adam Sawicki80927152018-09-07 17:27:23 +02003103 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003104 {
Adam Sawicki80927152018-09-07 17:27:23 +02003105 uint32_t algorithm = 0;
3106 switch(algorithmIndex)
3107 {
3108 case 0:
3109 break;
3110 case 1:
3111 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3112 break;
3113 case 2:
3114 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3115 break;
3116 default:
3117 assert(0);
3118 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003119
Adam Sawicki80927152018-09-07 17:27:23 +02003120 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003121 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3122 {
3123 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003124 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003125 {
3126 switch(allocStrategyIndex)
3127 {
3128 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3129 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3130 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3131 default: assert(0);
3132 }
3133 }
3134
Adam Sawicki80927152018-09-07 17:27:23 +02003135 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003136 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003137 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003138 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003139 strategy,
3140 freeOrder); // freeOrder
3141 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003142 }
3143 }
3144 }
3145}
3146
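// Tests a custom pool holding equally sized buffers: filling it completely, lost
// allocations driven by vmaSetCurrentFrameIndex, vmaDefragment on a half-empty pool,
// vmaMakePoolAllocationsLost, and an allocation larger than the whole block.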
Adam Sawickib8333fb2018-03-13 16:15:53 +01003147static void TestPool_SameSize()
3148{
3149 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3150 const size_t BUF_COUNT = 100;
3151 VkResult res;
3152
3153 RandomNumberGenerator rand{123};
3154
3155 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3156 bufferInfo.size = BUF_SIZE;
3157 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3158
3159 uint32_t memoryTypeBits = UINT32_MAX;
3160 {
3161 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003162 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003163 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003164
3165 VkMemoryRequirements memReq;
3166 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3167 memoryTypeBits = memReq.memoryTypeBits;
3168
Adam Sawicki1f84f622019-07-02 13:40:01 +02003169 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003170 }
3171
3172 VmaAllocationCreateInfo poolAllocInfo = {};
3173 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3174 uint32_t memTypeIndex;
3175 res = vmaFindMemoryTypeIndex(
3176 g_hAllocator,
3177 memoryTypeBits,
3178 &poolAllocInfo,
3179 &memTypeIndex);
3180
3181 VmaPoolCreateInfo poolCreateInfo = {};
3182 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3183 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3184 poolCreateInfo.minBlockCount = 1;
3185 poolCreateInfo.maxBlockCount = 4;
3186 poolCreateInfo.frameInUseCount = 0;
3187
3188 VmaPool pool;
3189 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003190 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003191
Adam Sawickia020fb82019-11-02 14:43:06 +01003192 // Test pool name
3193 {
3194 static const char* const POOL_NAME = "Pool name";
3195 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3196
3197 const char* fetchedPoolName = nullptr;
3198 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3199 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3200
Adam Sawickia020fb82019-11-02 14:43:06 +01003201 vmaSetPoolName(g_hAllocator, pool, nullptr);
3202 }
3203
Adam Sawickib8333fb2018-03-13 16:15:53 +01003204 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3205
3206 VmaAllocationCreateInfo allocInfo = {};
3207 allocInfo.pool = pool;
3208 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3209 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
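// CAN_BECOME_LOST allows these allocations to be reclaimed when the pool runs out of
// space; CAN_MAKE_OTHER_LOST allows a new allocation to reclaim older lost-capable ones.
// Both mechanisms are driven by the frame index set via vmaSetCurrentFrameIndex above.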
3210
3211 struct BufItem
3212 {
3213 VkBuffer Buf;
3214 VmaAllocation Alloc;
3215 };
3216 std::vector<BufItem> items;
3217
3218 // Fill entire pool.
3219 for(size_t i = 0; i < BUF_COUNT; ++i)
3220 {
3221 BufItem item;
3222 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003223 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003224 items.push_back(item);
3225 }
3226
3227 // Make sure that another allocation would fail.
3228 {
3229 BufItem item;
3230 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003231 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003232 }
3233
3234 // Validate that no buffer is lost. Also check that they are not mapped.
3235 for(size_t i = 0; i < items.size(); ++i)
3236 {
3237 VmaAllocationInfo allocInfo;
3238 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003239 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3240 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003241 }
3242
3243 // Free some percent of random items.
3244 {
3245 const size_t PERCENT_TO_FREE = 10;
3246 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3247 for(size_t i = 0; i < itemsToFree; ++i)
3248 {
3249 size_t index = (size_t)rand.Generate() % items.size();
3250 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3251 items.erase(items.begin() + index);
3252 }
3253 }
3254
3255 // Randomly allocate and free items.
3256 {
3257 const size_t OPERATION_COUNT = BUF_COUNT;
3258 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3259 {
3260 bool allocate = rand.Generate() % 2 != 0;
3261 if(allocate)
3262 {
3263 if(items.size() < BUF_COUNT)
3264 {
3265 BufItem item;
3266 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003267 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003268 items.push_back(item);
3269 }
3270 }
3271 else // Free
3272 {
3273 if(!items.empty())
3274 {
3275 size_t index = (size_t)rand.Generate() % items.size();
3276 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3277 items.erase(items.begin() + index);
3278 }
3279 }
3280 }
3281 }
3282
3283 // Allocate up to maximum.
3284 while(items.size() < BUF_COUNT)
3285 {
3286 BufItem item;
3287 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003288 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003289 items.push_back(item);
3290 }
3291
3292 // Validate that no buffer is lost.
3293 for(size_t i = 0; i < items.size(); ++i)
3294 {
3295 VmaAllocationInfo allocInfo;
3296 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003297 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003298 }
3299
3300 // Next frame.
3301 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3302
3303 // Allocate another BUF_COUNT buffers.
3304 for(size_t i = 0; i < BUF_COUNT; ++i)
3305 {
3306 BufItem item;
3307 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003308 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003309 items.push_back(item);
3310 }
3311
3312 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3313 for(size_t i = 0; i < BUF_COUNT; ++i)
3314 {
3315 VmaAllocationInfo allocInfo;
3316 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003317 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003318 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3319 }
3320 items.erase(items.begin(), items.begin() + BUF_COUNT);
3321
3322 // Validate that no buffer is lost.
3323 for(size_t i = 0; i < items.size(); ++i)
3324 {
3325 VmaAllocationInfo allocInfo;
3326 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003327 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003328 }
3329
3330 // Free one item.
3331 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3332 items.pop_back();
3333
3334 // Validate statistics.
3335 {
3336 VmaPoolStats poolStats = {};
3337 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003338 TEST(poolStats.allocationCount == items.size());
3339 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3340 TEST(poolStats.unusedRangeCount == 1);
3341 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3342 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003343 }
3344
3345 // Free all remaining items.
3346 for(size_t i = items.size(); i--; )
3347 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3348 items.clear();
3349
3350 // Allocate maximum items again.
3351 for(size_t i = 0; i < BUF_COUNT; ++i)
3352 {
3353 BufItem item;
3354 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003355 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003356 items.push_back(item);
3357 }
3358
3359 // Delete every other item.
3360 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3361 {
3362 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3363 items.erase(items.begin() + i);
3364 }
3365
3366 // Defragment!
3367 {
3368 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3369 for(size_t i = 0; i < items.size(); ++i)
3370 allocationsToDefragment[i] = items[i].Alloc;
3371
3372 VmaDefragmentationStats defragmentationStats;
3373 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003374 TEST(res == VK_SUCCESS);
3375 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003376 }
3377
3378 // Free all remaining items.
3379 for(size_t i = items.size(); i--; )
3380 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3381 items.clear();
3382
3383 ////////////////////////////////////////////////////////////////////////////////
3384 // Test for vmaMakePoolAllocationsLost
3385
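// With frameInUseCount == 0 on this pool, an allocation created with CAN_BECOME_LOST may
// be made lost as soon as it has not been touched (e.g. via vmaGetAllocationInfo) in the
// current frame, which is why only the two untouched buffers below are expected to be lost.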
3386 // Allocate 4 buffers on frame 10.
3387 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3388 for(size_t i = 0; i < 4; ++i)
3389 {
3390 BufItem item;
3391 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003392 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003393 items.push_back(item);
3394 }
3395
3396 // Touch first 2 of them on frame 11.
3397 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3398 for(size_t i = 0; i < 2; ++i)
3399 {
3400 VmaAllocationInfo allocInfo;
3401 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3402 }
3403
3404 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3405 size_t lostCount = 0xDEADC0DE;
3406 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003407 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003408
3409 // Make another call. Now 0 should be lost.
3410 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003411 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003412
3413 // Make another call, with null count. Should not crash.
3414 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3415
3416 // END: Free all remaining items.
3417 for(size_t i = items.size(); i--; )
3418 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3419
3420 items.clear();
3421
Adam Sawickid2924172018-06-11 12:48:46 +02003422 ////////////////////////////////////////////////////////////////////////////////
3423 // Test for allocation too large for pool
3424
3425 {
3426 VmaAllocationCreateInfo allocCreateInfo = {};
3427 allocCreateInfo.pool = pool;
3428
3429 VkMemoryRequirements memReq;
3430 memReq.memoryTypeBits = UINT32_MAX;
3431 memReq.alignment = 1;
3432 memReq.size = poolCreateInfo.blockSize + 4;
3433
3434 VmaAllocation alloc = nullptr;
3435 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003436 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003437 }
3438
Adam Sawickib8333fb2018-03-13 16:15:53 +01003439 vmaDestroyPool(g_hAllocator, pool);
3440}
3441
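// The next test relies on allocation contents being filled with known byte patterns. It
// assumes the library is compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled, e.g.:
//
//     #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
//     #include "vk_mem_alloc.h"
//
// so that newly created allocations are filled with 0xDC and destroyed ones with 0xEF,
// which are the patterns validated below.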
Adam Sawickie44c6262018-06-15 14:30:39 +02003442static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3443{
3444 const uint8_t* pBytes = (const uint8_t*)pMemory;
3445 for(size_t i = 0; i < size; ++i)
3446 {
3447 if(pBytes[i] != pattern)
3448 {
3449 return false;
3450 }
3451 }
3452 return true;
3453}
3454
3455static void TestAllocationsInitialization()
3456{
3457 VkResult res;
3458
3459 const size_t BUF_SIZE = 1024;
3460
3461 // Create pool.
3462
3463 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3464 bufInfo.size = BUF_SIZE;
3465 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3466
3467 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3468 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3469
3470 VmaPoolCreateInfo poolCreateInfo = {};
3471 poolCreateInfo.blockSize = BUF_SIZE * 10;
3472 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3473 poolCreateInfo.maxBlockCount = 1;
3474 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003475 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003476
3477 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3478 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003479 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003480
3481 // Create one persistently mapped buffer to keep memory of this block mapped,
3482 // so that pointer to mapped data will remain (more or less...) valid even
3483 // after destruction of other allocations.
3484
3485 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3486 VkBuffer firstBuf;
3487 VmaAllocation firstAlloc;
3488 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003489 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003490
3491 // Test buffers.
3492
3493 for(uint32_t i = 0; i < 2; ++i)
3494 {
3495 const bool persistentlyMapped = i == 0;
3496 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3497 VkBuffer buf;
3498 VmaAllocation alloc;
3499 VmaAllocationInfo allocInfo;
3500 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003501 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003502
3503 void* pMappedData;
3504 if(!persistentlyMapped)
3505 {
3506 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003507 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003508 }
3509 else
3510 {
3511 pMappedData = allocInfo.pMappedData;
3512 }
3513
3514 // Validate initialized content
3515 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003516 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003517
3518 if(!persistentlyMapped)
3519 {
3520 vmaUnmapMemory(g_hAllocator, alloc);
3521 }
3522
3523 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3524
3525 // Validate freed content
3526 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003527 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003528 }
3529
3530 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3531 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3532}
3533
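// Multi-threaded pool benchmark: each worker thread allocates and frees buffers/images
// in a shared custom pool across simulated frames, synchronized with the main thread
// through Win32 events, and reports per-thread timing plus lost/failed allocation counts.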
Adam Sawickib8333fb2018-03-13 16:15:53 +01003534static void TestPool_Benchmark(
3535 PoolTestResult& outResult,
3536 const PoolTestConfig& config)
3537{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003538 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003539
3540 RandomNumberGenerator mainRand{config.RandSeed};
3541
3542 uint32_t allocationSizeProbabilitySum = std::accumulate(
3543 config.AllocationSizes.begin(),
3544 config.AllocationSizes.end(),
3545 0u,
3546 [](uint32_t sum, const AllocationSize& allocSize) {
3547 return sum + allocSize.Probability;
3548 });
3549
3550 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3551 bufferInfo.size = 256; // Whatever.
3552 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3553
3554 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3555 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3556 imageInfo.extent.width = 256; // Whatever.
3557 imageInfo.extent.height = 256; // Whatever.
3558 imageInfo.extent.depth = 1;
3559 imageInfo.mipLevels = 1;
3560 imageInfo.arrayLayers = 1;
3561 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3562 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3563 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3564 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3565 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3566
3567 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3568 {
3569 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003570 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003571 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003572
3573 VkMemoryRequirements memReq;
3574 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3575 bufferMemoryTypeBits = memReq.memoryTypeBits;
3576
Adam Sawicki1f84f622019-07-02 13:40:01 +02003577 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003578 }
3579
3580 uint32_t imageMemoryTypeBits = UINT32_MAX;
3581 {
3582 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003583 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003584 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003585
3586 VkMemoryRequirements memReq;
3587 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3588 imageMemoryTypeBits = memReq.memoryTypeBits;
3589
Adam Sawicki1f84f622019-07-02 13:40:01 +02003590 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003591 }
3592
3593 uint32_t memoryTypeBits = 0;
3594 if(config.UsesBuffers() && config.UsesImages())
3595 {
3596 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3597 if(memoryTypeBits == 0)
3598 {
3599 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3600 return;
3601 }
3602 }
3603 else if(config.UsesBuffers())
3604 memoryTypeBits = bufferMemoryTypeBits;
3605 else if(config.UsesImages())
3606 memoryTypeBits = imageMemoryTypeBits;
3607 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003608 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003609
3610 VmaPoolCreateInfo poolCreateInfo = {};
3611 poolCreateInfo.memoryTypeIndex = 0;
3612 poolCreateInfo.minBlockCount = 1;
3613 poolCreateInfo.maxBlockCount = 1;
3614 poolCreateInfo.blockSize = config.PoolSize;
3615 poolCreateInfo.frameInUseCount = 1;
3616
3617 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3618 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3619 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3620
3621 VmaPool pool;
3622 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003623 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003624
3625 // Start time measurement - after creating pool and initializing data structures.
3626 time_point timeBeg = std::chrono::high_resolution_clock::now();
3627
3628 ////////////////////////////////////////////////////////////////////////////////
3629 // ThreadProc
3630 auto ThreadProc = [&](
3631 PoolTestThreadResult* outThreadResult,
3632 uint32_t randSeed,
3633 HANDLE frameStartEvent,
3634 HANDLE frameEndEvent) -> void
3635 {
3636 RandomNumberGenerator threadRand{randSeed};
3637
3638 outThreadResult->AllocationTimeMin = duration::max();
3639 outThreadResult->AllocationTimeSum = duration::zero();
3640 outThreadResult->AllocationTimeMax = duration::min();
3641 outThreadResult->DeallocationTimeMin = duration::max();
3642 outThreadResult->DeallocationTimeSum = duration::zero();
3643 outThreadResult->DeallocationTimeMax = duration::min();
3644 outThreadResult->AllocationCount = 0;
3645 outThreadResult->DeallocationCount = 0;
3646 outThreadResult->LostAllocationCount = 0;
3647 outThreadResult->LostAllocationTotalSize = 0;
3648 outThreadResult->FailedAllocationCount = 0;
3649 outThreadResult->FailedAllocationTotalSize = 0;
3650
3651 struct Item
3652 {
3653 VkDeviceSize BufferSize;
3654 VkExtent2D ImageSize;
3655 VkBuffer Buf;
3656 VkImage Image;
3657 VmaAllocation Alloc;
3658
3659 VkDeviceSize CalcSizeBytes() const
3660 {
3661 return BufferSize +
3662 ImageSize.width * ImageSize.height * 4;
3663 }
3664 };
3665 std::vector<Item> unusedItems, usedItems;
3666
3667 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3668
3669 // Create all items - all unused, not yet allocated.
3670 for(size_t i = 0; i < threadTotalItemCount; ++i)
3671 {
3672 Item item = {};
3673
3674 uint32_t allocSizeIndex = 0;
3675 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3676 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3677 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3678
3679 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3680 if(allocSize.BufferSizeMax > 0)
3681 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003682 TEST(allocSize.BufferSizeMin > 0);
3683 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003684 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3685 item.BufferSize = allocSize.BufferSizeMin;
3686 else
3687 {
3688 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3689 item.BufferSize = item.BufferSize / 16 * 16;
3690 }
3691 }
3692 else
3693 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003694 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003695 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3696 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3697 else
3698 {
3699 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3700 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3701 }
3702 }
3703
3704 unusedItems.push_back(item);
3705 }
3706
3707 auto Allocate = [&](Item& item) -> VkResult
3708 {
3709 VmaAllocationCreateInfo allocCreateInfo = {};
3710 allocCreateInfo.pool = pool;
3711 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3712 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3713
3714 if(item.BufferSize)
3715 {
3716 bufferInfo.size = item.BufferSize;
3717 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3718 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3719 }
3720 else
3721 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003722 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003723
3724 imageInfo.extent.width = item.ImageSize.width;
3725 imageInfo.extent.height = item.ImageSize.height;
3726 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3727 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3728 }
3729 };
3730
3731 ////////////////////////////////////////////////////////////////////////////////
3732 // Frames
3733 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3734 {
3735 WaitForSingleObject(frameStartEvent, INFINITE);
3736
3737 // Always make some percent of used bufs unused, to choose different used ones.
3738 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3739 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3740 {
3741 size_t index = threadRand.Generate() % usedItems.size();
3742 unusedItems.push_back(usedItems[index]);
3743 usedItems.erase(usedItems.begin() + index);
3744 }
3745
3746 // Determine which bufs we want to use in this frame.
3747 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3748 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003749 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003750 // Move some used to unused.
3751 while(usedBufCount < usedItems.size())
3752 {
3753 size_t index = threadRand.Generate() % usedItems.size();
3754 unusedItems.push_back(usedItems[index]);
3755 usedItems.erase(usedItems.begin() + index);
3756 }
3757 // Move some unused to used.
3758 while(usedBufCount > usedItems.size())
3759 {
3760 size_t index = threadRand.Generate() % unusedItems.size();
3761 usedItems.push_back(unusedItems[index]);
3762 unusedItems.erase(unusedItems.begin() + index);
3763 }
3764
3765 uint32_t touchExistingCount = 0;
3766 uint32_t touchLostCount = 0;
3767 uint32_t createSucceededCount = 0;
3768 uint32_t createFailedCount = 0;
3769
3770 // Touch all used bufs. If not created or lost, allocate.
3771 for(size_t i = 0; i < usedItems.size(); ++i)
3772 {
3773 Item& item = usedItems[i];
3774 // Not yet created.
3775 if(item.Alloc == VK_NULL_HANDLE)
3776 {
3777 res = Allocate(item);
3778 ++outThreadResult->AllocationCount;
3779 if(res != VK_SUCCESS)
3780 {
3781 item.Alloc = VK_NULL_HANDLE;
3782 item.Buf = VK_NULL_HANDLE;
3783 ++outThreadResult->FailedAllocationCount;
3784 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3785 ++createFailedCount;
3786 }
3787 else
3788 ++createSucceededCount;
3789 }
3790 else
3791 {
3792 // Touch.
3793 VmaAllocationInfo allocInfo;
3794 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3795 // Lost.
3796 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3797 {
3798 ++touchLostCount;
3799
3800 // Destroy.
3801 {
3802 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3803 if(item.Buf)
3804 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3805 else
3806 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3807 ++outThreadResult->DeallocationCount;
3808 }
3809 item.Alloc = VK_NULL_HANDLE;
3810 item.Buf = VK_NULL_HANDLE;
3811
3812 ++outThreadResult->LostAllocationCount;
3813 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3814
3815 // Recreate.
3816 res = Allocate(item);
3817 ++outThreadResult->AllocationCount;
3818 // Creation failed.
3819 if(res != VK_SUCCESS)
3820 {
3821 ++outThreadResult->FailedAllocationCount;
3822 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3823 ++createFailedCount;
3824 }
3825 else
3826 ++createSucceededCount;
3827 }
3828 else
3829 ++touchExistingCount;
3830 }
3831 }
3832
3833 /*
3834 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3835 randSeed, frameIndex,
3836 touchExistingCount, touchLostCount,
3837 createSucceededCount, createFailedCount);
3838 */
3839
3840 SetEvent(frameEndEvent);
3841 }
3842
3843 // Free all remaining items.
3844 for(size_t i = usedItems.size(); i--; )
3845 {
3846 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3847 if(usedItems[i].Buf)
3848 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3849 else
3850 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3851 ++outThreadResult->DeallocationCount;
3852 }
3853 for(size_t i = unusedItems.size(); i--; )
3854 {
3855 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3856 if(unusedItems[i].Buf)
3857 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3858 else
3859 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3860 ++outThreadResult->DeallocationCount;
3861 }
3862 };
3863
3864 // Launch threads.
3865 uint32_t threadRandSeed = mainRand.Generate();
3866 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3867 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3868 std::vector<std::thread> bkgThreads;
3869 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3870 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3871 {
3872 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3873 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3874 bkgThreads.emplace_back(std::bind(
3875 ThreadProc,
3876 &threadResults[threadIndex],
3877 threadRandSeed + threadIndex,
3878 frameStartEvents[threadIndex],
3879 frameEndEvents[threadIndex]));
3880 }
3881
3882 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003883 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003884 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3885 {
3886 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3887 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3888 SetEvent(frameStartEvents[threadIndex]);
3889 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3890 }
3891
3892 // Wait for threads to finish.
3893 for(size_t i = 0; i < bkgThreads.size(); ++i)
3894 {
3895 bkgThreads[i].join();
3896 CloseHandle(frameEndEvents[i]);
3897 CloseHandle(frameStartEvents[i]);
3898 }
3899 bkgThreads.clear();
3900
3901 // Finish time measurement - before destroying pool.
3902 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3903
3904 vmaDestroyPool(g_hAllocator, pool);
3905
3906 outResult.AllocationTimeMin = duration::max();
3907 outResult.AllocationTimeAvg = duration::zero();
3908 outResult.AllocationTimeMax = duration::min();
3909 outResult.DeallocationTimeMin = duration::max();
3910 outResult.DeallocationTimeAvg = duration::zero();
3911 outResult.DeallocationTimeMax = duration::min();
3912 outResult.LostAllocationCount = 0;
3913 outResult.LostAllocationTotalSize = 0;
3914 outResult.FailedAllocationCount = 0;
3915 outResult.FailedAllocationTotalSize = 0;
3916 size_t allocationCount = 0;
3917 size_t deallocationCount = 0;
3918 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3919 {
3920 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3921 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3922 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3923 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3924 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3925 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3926 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3927 allocationCount += threadResult.AllocationCount;
3928 deallocationCount += threadResult.DeallocationCount;
3929 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3930 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3931 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3932 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3933 }
3934 if(allocationCount)
3935 outResult.AllocationTimeAvg /= allocationCount;
3936 if(deallocationCount)
3937 outResult.DeallocationTimeAvg /= deallocationCount;
3938}
3939
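// Returns true if host memory ranges [ptr1, ptr1+size1) and [ptr2, ptr2+size2) overlap.
// Used below to verify that mapped pointers of distinct allocations are disjoint.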
3940static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3941{
3942 if(ptr1 < ptr2)
3943 return ptr1 + size1 > ptr2;
3944 else if(ptr2 < ptr1)
3945 return ptr2 + size2 > ptr1;
3946 else
3947 return true;
3948}
3949
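// For every VmaMemoryUsage value, tries to allocate memory for a few representative
// buffer and image kinds and prints which memory type was chosen (or the failure code).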
Adam Sawickiefa88c42019-11-18 16:33:56 +01003950static void TestMemoryUsage()
3951{
3952 wprintf(L"Testing memory usage:\n");
3953
Adam Sawicki69185552019-11-18 17:03:34 +01003954 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01003955 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
3956 {
3957 switch(usage)
3958 {
3959 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
3960 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
3961 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
3962 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
3963 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
3964 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01003965 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01003966 default: assert(0);
3967 }
3968
3969 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
3970 {
3971 if(res == VK_SUCCESS)
3972 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
3973 else
3974 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
3975 };
3976
3977 // 1: Buffer for copy
3978 {
3979 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3980 bufCreateInfo.size = 65536;
3981 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3982
3983 VkBuffer buf = VK_NULL_HANDLE;
3984 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
3985 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
3986
3987 VkMemoryRequirements memReq = {};
3988 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
3989
3990 VmaAllocationCreateInfo allocCreateInfo = {};
3991 allocCreateInfo.usage = (VmaMemoryUsage)usage;
3992 VmaAllocation alloc = VK_NULL_HANDLE;
3993 VmaAllocationInfo allocInfo = {};
3994 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
3995 if(res == VK_SUCCESS)
3996 {
3997 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
3998 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
3999 TEST(res == VK_SUCCESS);
4000 }
4001 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4002 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4003 }
4004
4005 // 2: Vertex buffer
4006 {
4007 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4008 bufCreateInfo.size = 65536;
4009 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4010
4011 VkBuffer buf = VK_NULL_HANDLE;
4012 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4013 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4014
4015 VkMemoryRequirements memReq = {};
4016 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4017
4018 VmaAllocationCreateInfo allocCreateInfo = {};
4019 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4020 VmaAllocation alloc = VK_NULL_HANDLE;
4021 VmaAllocationInfo allocInfo = {};
4022 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4023 if(res == VK_SUCCESS)
4024 {
4025 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4026 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4027 TEST(res == VK_SUCCESS);
4028 }
4029 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
4030 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4031 }
4032
4033 // 3: Image for copy, OPTIMAL
4034 {
4035 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4036 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4037 imgCreateInfo.extent.width = 256;
4038 imgCreateInfo.extent.height = 256;
4039 imgCreateInfo.extent.depth = 1;
4040 imgCreateInfo.mipLevels = 1;
4041 imgCreateInfo.arrayLayers = 1;
4042 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4043 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4044 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4045 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
4046 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4047
4048 VkImage img = VK_NULL_HANDLE;
4049 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4050 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4051
4052 VkMemoryRequirements memReq = {};
4053 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4054
4055 VmaAllocationCreateInfo allocCreateInfo = {};
4056 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4057 VmaAllocation alloc = VK_NULL_HANDLE;
4058 VmaAllocationInfo allocInfo = {};
4059 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4060 if(res == VK_SUCCESS)
4061 {
4062 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4063 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4064 TEST(res == VK_SUCCESS);
4065 }
4066 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4067
4068 vmaDestroyImage(g_hAllocator, img, alloc);
4069 }
4070
4071 // 4: Image SAMPLED, OPTIMAL
4072 {
4073 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4074 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4075 imgCreateInfo.extent.width = 256;
4076 imgCreateInfo.extent.height = 256;
4077 imgCreateInfo.extent.depth = 1;
4078 imgCreateInfo.mipLevels = 1;
4079 imgCreateInfo.arrayLayers = 1;
4080 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4081 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4082 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4083 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4084 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4085
4086 VkImage img = VK_NULL_HANDLE;
4087 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4088 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4089
4090 VkMemoryRequirements memReq = {};
4091 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4092
4093 VmaAllocationCreateInfo allocCreateInfo = {};
4094 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4095 VmaAllocation alloc = VK_NULL_HANDLE;
4096 VmaAllocationInfo allocInfo = {};
4097 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4098 if(res == VK_SUCCESS)
4099 {
4100 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4101 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4102 TEST(res == VK_SUCCESS);
4103 }
4104 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4105 vmaDestroyImage(g_hAllocator, img, alloc);
4106 }
4107
4108 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4109 {
4110 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4111 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4112 imgCreateInfo.extent.width = 256;
4113 imgCreateInfo.extent.height = 256;
4114 imgCreateInfo.extent.depth = 1;
4115 imgCreateInfo.mipLevels = 1;
4116 imgCreateInfo.arrayLayers = 1;
4117 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4118 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4119 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4120 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4121 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4122
4123 VkImage img = VK_NULL_HANDLE;
4124 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4125 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4126
4127 VkMemoryRequirements memReq = {};
4128 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4129
4130 VmaAllocationCreateInfo allocCreateInfo = {};
4131 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4132 VmaAllocation alloc = VK_NULL_HANDLE;
4133 VmaAllocationInfo allocInfo = {};
4134 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4135 if(res == VK_SUCCESS)
4136 {
4137 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4138 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4139 TEST(res == VK_SUCCESS);
4140 }
4141 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4142 vmaDestroyImage(g_hAllocator, img, alloc);
4143 }
4144 }
4145}
4146
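// Verifies that the numbers reported by vmaGetBudget react to creating and destroying
// large buffers: allocationBytes of the affected heap must grow by exactly
// BUF_SIZE * BUF_COUNT while the buffers exist and return to its initial value afterwards.
//
// As a side note, a minimal sketch of how an application might use the same query to
// throttle new allocations (heapIndex and nextAllocSize are hypothetical values, not part
// of this test):
//
//     VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
//     vmaGetBudget(g_hAllocator, budget);
//     if(budget[heapIndex].usage + nextAllocSize > budget[heapIndex].budget)
//     {
//         // Postpone the allocation or fall back to another memory type.
//     }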
Adam Sawicki40ffe982019-10-11 15:56:02 +02004147static void TestBudget()
4148{
4149 wprintf(L"Testing budget...\n");
4150
Adam Sawicki353e3672019-11-02 14:12:05 +01004151 static const VkDeviceSize BUF_SIZE = 100ull * 1024 * 1024;
4152 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004153
4154 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
4155 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004156 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
4157
4158 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
4159 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004160
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01004161 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4162 {
4163 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
4164 }
4165
Adam Sawicki40ffe982019-10-11 15:56:02 +02004166 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4167 bufInfo.size = BUF_SIZE;
4168 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4169
4170 VmaAllocationCreateInfo allocCreateInfo = {};
4171 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4172 if(testIndex == 0)
4173 {
4174 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4175 }
4176
4177 // CREATE BUFFERS
4178 uint32_t heapIndex = 0;
4179 BufferInfo bufInfos[BUF_COUNT] = {};
4180 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
4181 {
4182 VmaAllocationInfo allocInfo;
4183 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4184 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
4185 TEST(res == VK_SUCCESS);
4186 if(bufIndex == 0)
4187 {
4188 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
4189 }
4190 else
4191 {
4192 // All buffers need to fall into the same heap.
4193 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
4194 }
4195 }
4196
Adam Sawicki353e3672019-11-02 14:12:05 +01004197 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
4198 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004199
4200 // DESTROY BUFFERS
4201 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
4202 {
4203 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
4204 }
4205
Adam Sawicki353e3672019-11-02 14:12:05 +01004206 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
4207 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004208
4209 // CHECK
4210 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4211 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004212 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004213 if(i == heapIndex)
4214 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004215 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
4216 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
4217 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004218 }
4219 else
4220 {
Adam Sawicki353e3672019-11-02 14:12:05 +01004221 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes &&
4222 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
4223 TEST(budgetEnd[i].blockBytes == budgetBeg[i].blockBytes &&
4224 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02004225 }
4226 }
4227 }
4228}
4229
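// Exercises vmaMapMemory/vmaUnmapMemory and VMA_ALLOCATION_CREATE_MAPPED_BIT for default
// allocations, allocations from a custom pool, and dedicated allocations. Mapping is
// reference-counted, so mapping the same allocation twice returns the same pointer and
// requires two matching unmaps, which is what the code below checks.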
Adam Sawickib8333fb2018-03-13 16:15:53 +01004230static void TestMapping()
4231{
4232 wprintf(L"Testing mapping...\n");
4233
4234 VkResult res;
4235 uint32_t memTypeIndex = UINT32_MAX;
4236
4237 enum TEST
4238 {
4239 TEST_NORMAL,
4240 TEST_POOL,
4241 TEST_DEDICATED,
4242 TEST_COUNT
4243 };
4244 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4245 {
4246 VmaPool pool = nullptr;
4247 if(testIndex == TEST_POOL)
4248 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004249 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004250 VmaPoolCreateInfo poolInfo = {};
4251 poolInfo.memoryTypeIndex = memTypeIndex;
4252 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004253 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004254 }
4255
4256 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4257 bufInfo.size = 0x10000;
4258 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004259
Adam Sawickib8333fb2018-03-13 16:15:53 +01004260 VmaAllocationCreateInfo allocCreateInfo = {};
4261 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4262 allocCreateInfo.pool = pool;
4263 if(testIndex == TEST_DEDICATED)
4264 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004265
Adam Sawickib8333fb2018-03-13 16:15:53 +01004266 VmaAllocationInfo allocInfo;
Adam Sawicki40ffe982019-10-11 15:56:02 +02004267
Adam Sawickib8333fb2018-03-13 16:15:53 +01004268 // Mapped manually
4269
4270 // Create 2 buffers. The 3rd array slot is filled later with a persistently mapped buffer.
4271 BufferInfo bufferInfos[3];
4272 for(size_t i = 0; i < 2; ++i)
4273 {
4274 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4275 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004276 TEST(res == VK_SUCCESS);
4277 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004278 memTypeIndex = allocInfo.memoryType;
4279 }
Adam Sawicki40ffe982019-10-11 15:56:02 +02004280
Adam Sawickib8333fb2018-03-13 16:15:53 +01004281 // Map buffer 0.
4282 char* data00 = nullptr;
4283 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004284 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004285 data00[0xFFFF] = data00[0];
4286
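    // Note: mapping in VMA is reference-counted - mapping the same allocation a
    // second time should return the same pointer, and the allocation has to be
    // unmapped the same number of times, which the checks below rely on.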
4287 // Map buffer 0 second time.
4288 char* data01 = nullptr;
4289 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004290 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004291
4292 // Map buffer 1.
4293 char* data1 = nullptr;
4294 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004295 TEST(res == VK_SUCCESS && data1 != nullptr);
4296 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004297 data1[0xFFFF] = data1[0];
4298
4299 // Unmap buffer 0 two times.
4300 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4301 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4302 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004303 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004304
4305 // Unmap buffer 1.
4306 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4307 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004308 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004309
4310 // Create 3rd buffer - persistently mapped.
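    // With VMA_ALLOCATION_CREATE_MAPPED_BIT the allocation stays mapped for its
    // whole lifetime (pMappedData is valid right after creation), and an extra
    // manual vmaMapMemory/vmaUnmapMemory pair on top of it is still allowed.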
4311 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4312 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4313 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004314 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004315
4316 // Map buffer 2.
4317 char* data2 = nullptr;
4318 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004319 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004320 data2[0xFFFF] = data2[0];
4321
4322 // Unmap buffer 2.
4323 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4324 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004325 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004326
4327 // Destroy all buffers.
4328 for(size_t i = 3; i--; )
4329 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4330
4331 vmaDestroyPool(g_hAllocator, pool);
4332 }
4333}
4334
Adam Sawickidaa6a552019-06-25 15:26:37 +02004335// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
4336static void TestDeviceLocalMapped()
4337{
4338 VkResult res;
4339
4340 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
4341 {
4342 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4343 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4344 bufCreateInfo.size = 4096;
4345
4346 VmaPool pool = VK_NULL_HANDLE;
4347 VmaAllocationCreateInfo allocCreateInfo = {};
4348 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4349 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4350 if(testIndex == 2)
4351 {
4352 VmaPoolCreateInfo poolCreateInfo = {};
4353 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4354 TEST(res == VK_SUCCESS);
4355 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4356 TEST(res == VK_SUCCESS);
4357 allocCreateInfo.pool = pool;
4358 }
4359 else if(testIndex == 1)
4360 {
4361 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4362 }
4363
4364 VkBuffer buf = VK_NULL_HANDLE;
4365 VmaAllocation alloc = VK_NULL_HANDLE;
4366 VmaAllocationInfo allocInfo = {};
4367 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4368 TEST(res == VK_SUCCESS && alloc);
4369
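    // If the chosen DEVICE_LOCAL memory type is not HOST_VISIBLE, the MAPPED flag
    // cannot be honored and pMappedData is expected to stay null; otherwise the
    // allocation should come back persistently mapped.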
4370 VkMemoryPropertyFlags memTypeFlags = 0;
4371 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4372 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4373 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
4374
4375 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4376 vmaDestroyPool(g_hAllocator, pool);
4377 }
4378}
4379
Adam Sawickib8333fb2018-03-13 16:15:53 +01004380static void TestMappingMultithreaded()
4381{
4382 wprintf(L"Testing mapping multithreaded...\n");
4383
4384 static const uint32_t threadCount = 16;
4385 static const uint32_t bufferCount = 1024;
4386 static const uint32_t threadBufferCount = bufferCount / threadCount;
4387
4388 VkResult res;
4389 volatile uint32_t memTypeIndex = UINT32_MAX;
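    // Shared between iterations and worker threads: the first allocation made in
    // the TEST_NORMAL pass publishes its memory type index here, and the TEST_POOL
    // pass then creates its custom pool in that same memory type.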
4390
4391 enum TEST
4392 {
4393 TEST_NORMAL,
4394 TEST_POOL,
4395 TEST_DEDICATED,
4396 TEST_COUNT
4397 };
4398 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4399 {
4400 VmaPool pool = nullptr;
4401 if(testIndex == TEST_POOL)
4402 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004403 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004404 VmaPoolCreateInfo poolInfo = {};
4405 poolInfo.memoryTypeIndex = memTypeIndex;
4406 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004407 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004408 }
4409
4410 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4411 bufCreateInfo.size = 0x10000;
4412 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4413
4414 VmaAllocationCreateInfo allocCreateInfo = {};
4415 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4416 allocCreateInfo.pool = pool;
4417 if(testIndex == TEST_DEDICATED)
4418 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4419
4420 std::thread threads[threadCount];
4421 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4422 {
4423 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4424 // ======== THREAD FUNCTION ========
4425
4426 RandomNumberGenerator rand{threadIndex};
4427
4428 enum class MODE
4429 {
4430 // Don't map this buffer at all.
4431 DONT_MAP,
4432 // Map and quickly unmap.
4433 MAP_FOR_MOMENT,
4434 // Map and unmap before destruction.
4435 MAP_FOR_LONGER,
4436 // Map two times. Unmap once right away; the second unmap happens just before destruction.
4437 MAP_TWO_TIMES,
4438 // Create this buffer as persistently mapped.
4439 PERSISTENTLY_MAPPED,
4440 COUNT
4441 };
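    // Each thread creates its share of the buffers and picks one of the modes
    // above at random per buffer, so all mapping paths are exercised concurrently
    // from multiple threads against the same allocator.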
4442 std::vector<BufferInfo> bufInfos{threadBufferCount};
4443 std::vector<MODE> bufModes{threadBufferCount};
4444
4445 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4446 {
4447 BufferInfo& bufInfo = bufInfos[bufferIndex];
4448 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4449 bufModes[bufferIndex] = mode;
4450
4451 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4452 if(mode == MODE::PERSISTENTLY_MAPPED)
4453 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4454
4455 VmaAllocationInfo allocInfo;
4456 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4457 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004458 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004459
4460 if(memTypeIndex == UINT32_MAX)
4461 memTypeIndex = allocInfo.memoryType;
4462
4463 char* data = nullptr;
4464
4465 if(mode == MODE::PERSISTENTLY_MAPPED)
4466 {
4467 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004468 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004469 }
4470 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4471 mode == MODE::MAP_TWO_TIMES)
4472 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004473 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004474 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004475 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004476
4477 if(mode == MODE::MAP_TWO_TIMES)
4478 {
4479 char* data2 = nullptr;
4480 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004481 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004482 }
4483 }
4484 else if(mode == MODE::DONT_MAP)
4485 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004486 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004487 }
4488 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004489 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004490
4491 // Test that reading and writing at the beginning and end of the mapped memory don't crash.
4492 if(data)
4493 data[0xFFFF] = data[0];
4494
4495 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4496 {
4497 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4498
4499 VmaAllocationInfo allocInfo;
4500 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4501 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004502 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004503 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004504 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004505 }
4506
4507 switch(rand.Generate() % 3)
4508 {
4509 case 0: Sleep(0); break; // Yield.
4510 case 1: Sleep(10); break; // 10 ms
4511 // default: No sleep.
4512 }
4513
4514 // Test that reading and writing at the beginning and end of the mapped memory don't crash.
4515 if(data)
4516 data[0xFFFF] = data[0];
4517 }
4518
4519 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4520 {
4521 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4522 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4523 {
4524 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4525
4526 VmaAllocationInfo allocInfo;
4527 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004528 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004529 }
4530
4531 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4532 }
4533 });
4534 }
4535
4536 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4537 threads[threadIndex].join();
4538
4539 vmaDestroyPool(g_hAllocator, pool);
4540 }
4541}
4542
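// The helpers below write benchmark results as rows of a CSV file. The
// testDescription string passed to WriteMainTestResult is itself a
// comma-separated fragment built in PerformMainTests, so it expands into the
// Threads / Buffers and images / Sizes / Operations / Allocation strategy /
// Free order columns declared in the header.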
4543static void WriteMainTestResultHeader(FILE* file)
4544{
4545 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004546 "Code,Time,"
4547 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004548 "Total Time (us),"
4549 "Allocation Time Min (us),"
4550 "Allocation Time Avg (us),"
4551 "Allocation Time Max (us),"
4552 "Deallocation Time Min (us),"
4553 "Deallocation Time Avg (us),"
4554 "Deallocation Time Max (us),"
4555 "Total Memory Allocated (B),"
4556 "Free Range Size Avg (B),"
4557 "Free Range Size Max (B)\n");
4558}
4559
4560static void WriteMainTestResult(
4561 FILE* file,
4562 const char* codeDescription,
4563 const char* testDescription,
4564 const Config& config, const Result& result)
4565{
4566 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4567 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4568 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4569 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4570 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4571 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4572 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4573
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004574 std::string currTime;
4575 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004576
4577 fprintf(file,
4578 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004579 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4580 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004581 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004582 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004583 totalTimeSeconds * 1e6f,
4584 allocationTimeMinSeconds * 1e6f,
4585 allocationTimeAvgSeconds * 1e6f,
4586 allocationTimeMaxSeconds * 1e6f,
4587 deallocationTimeMinSeconds * 1e6f,
4588 deallocationTimeAvgSeconds * 1e6f,
4589 deallocationTimeMaxSeconds * 1e6f,
4590 result.TotalMemoryAllocated,
4591 result.FreeRangeSizeAvg,
4592 result.FreeRangeSizeMax);
4593}
4594
4595static void WritePoolTestResultHeader(FILE* file)
4596{
4597 fprintf(file,
4598 "Code,Test,Time,"
4599 "Config,"
4600 "Total Time (us),"
4601 "Allocation Time Min (us),"
4602 "Allocation Time Avg (us),"
4603 "Allocation Time Max (us),"
4604 "Deallocation Time Min (us),"
4605 "Deallocation Time Avg (us),"
4606 "Deallocation Time Max (us),"
4607 "Lost Allocation Count,"
4608 "Lost Allocation Total Size (B),"
4609 "Failed Allocation Count,"
4610 "Failed Allocation Total Size (B)\n");
4611}
4612
4613static void WritePoolTestResult(
4614 FILE* file,
4615 const char* codeDescription,
4616 const char* testDescription,
4617 const PoolTestConfig& config,
4618 const PoolTestResult& result)
4619{
4620 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4621 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4622 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4623 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4624 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4625 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4626 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4627
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004628 std::string currTime;
4629 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004630
4631 fprintf(file,
4632 "%s,%s,%s,"
4633 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4634 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4635 // General
4636 codeDescription,
4637 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004638 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004639 // Config
4640 config.ThreadCount,
4641 (unsigned long long)config.PoolSize,
4642 config.FrameCount,
4643 config.TotalItemCount,
4644 config.UsedItemCountMin,
4645 config.UsedItemCountMax,
4646 config.ItemsToMakeUnusedPercent,
4647 // Results
4648 totalTimeSeconds * 1e6f,
4649 allocationTimeMinSeconds * 1e6f,
4650 allocationTimeAvgSeconds * 1e6f,
4651 allocationTimeMaxSeconds * 1e6f,
4652 deallocationTimeMinSeconds * 1e6f,
4653 deallocationTimeAvgSeconds * 1e6f,
4654 deallocationTimeMaxSeconds * 1e6f,
4655 result.LostAllocationCount,
4656 result.LostAllocationTotalSize,
4657 result.FailedAllocationCount,
4658 result.FailedAllocationTotalSize);
4659}
4660
4661static void PerformCustomMainTest(FILE* file)
4662{
4663 Config config{};
4664 config.RandSeed = 65735476;
4665 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4666 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4667 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4668 config.FreeOrder = FREE_ORDER::FORWARD;
4669 config.ThreadCount = 16;
4670 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004671 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004672
4673 // Buffers
4674 //config.AllocationSizes.push_back({4, 16, 1024});
4675 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4676
4677 // Images
4678 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4679 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4680
4681 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4682 config.AdditionalOperationCount = 1024;
4683
4684 Result result{};
4685 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004686 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004687 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4688}
4689
4690static void PerformCustomPoolTest(FILE* file)
4691{
4692 PoolTestConfig config;
4693 config.PoolSize = 100 * 1024 * 1024;
4694 config.RandSeed = 2345764;
4695 config.ThreadCount = 1;
4696 config.FrameCount = 200;
4697 config.ItemsToMakeUnusedPercent = 2;
4698
4699 AllocationSize allocSize = {};
4700 allocSize.BufferSizeMin = 1024;
4701 allocSize.BufferSizeMax = 1024 * 1024;
4702 allocSize.Probability = 1;
4703 config.AllocationSizes.push_back(allocSize);
4704
4705 allocSize.BufferSizeMin = 0;
4706 allocSize.BufferSizeMax = 0;
4707 allocSize.ImageSizeMin = 128;
4708 allocSize.ImageSizeMax = 1024;
4709 allocSize.Probability = 1;
4710 config.AllocationSizes.push_back(allocSize);
4711
4712 config.PoolSize = config.CalcAvgResourceSize() * 200;
4713 config.UsedItemCountMax = 160;
4714 config.TotalItemCount = config.UsedItemCountMax * 10;
4715 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4716
4717 g_MemoryAliasingWarningEnabled = false;
4718 PoolTestResult result = {};
4719 TestPool_Benchmark(result, config);
4720 g_MemoryAliasingWarningEnabled = true;
4721
4722 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4723}
4724
Adam Sawickib8333fb2018-03-13 16:15:53 +01004725static void PerformMainTests(FILE* file)
4726{
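    // Runs MainTest() over a matrix of configurations: thread count, buffers vs
    // images, small vs large sizes, varying vs constant sizes, how much is
    // allocated up front vs done as additional operations, and allocation
    // strategy. ConfigType controls how much of the matrix is covered; each
    // combination is written to the CSV file as one row per repeat.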
4727 uint32_t repeatCount = 1;
4728 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4729
4730 Config config{};
4731 config.RandSeed = 65735476;
4732 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4733 config.FreeOrder = FREE_ORDER::FORWARD;
4734
4735 size_t threadCountCount = 1;
4736 switch(ConfigType)
4737 {
4738 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4739 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4740 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4741 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4742 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4743 default: assert(0);
4744 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004745
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004746 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004747
Adam Sawickib8333fb2018-03-13 16:15:53 +01004748 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4749 {
4750 std::string desc1;
4751
4752 switch(threadCountIndex)
4753 {
4754 case 0:
4755 desc1 += "1_thread";
4756 config.ThreadCount = 1;
4757 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4758 break;
4759 case 1:
4760 desc1 += "16_threads+0%_common";
4761 config.ThreadCount = 16;
4762 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4763 break;
4764 case 2:
4765 desc1 += "16_threads+50%_common";
4766 config.ThreadCount = 16;
4767 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4768 break;
4769 case 3:
4770 desc1 += "16_threads+100%_common";
4771 config.ThreadCount = 16;
4772 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4773 break;
4774 case 4:
4775 desc1 += "2_threads+0%_common";
4776 config.ThreadCount = 2;
4777 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4778 break;
4779 case 5:
4780 desc1 += "2_threads+50%_common";
4781 config.ThreadCount = 2;
4782 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4783 break;
4784 case 6:
4785 desc1 += "2_threads+100%_common";
4786 config.ThreadCount = 2;
4787 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4788 break;
4789 default:
4790 assert(0);
4791 }
4792
4793 // 0 = buffers, 1 = images, 2 = buffers and images
4794 size_t buffersVsImagesCount = 2;
4795 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4796 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4797 {
4798 std::string desc2 = desc1;
4799 switch(buffersVsImagesIndex)
4800 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004801 case 0: desc2 += ",Buffers"; break;
4802 case 1: desc2 += ",Images"; break;
4803 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004804 default: assert(0);
4805 }
4806
4807 // 0 = small, 1 = large, 2 = small and large
4808 size_t smallVsLargeCount = 2;
4809 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4810 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4811 {
4812 std::string desc3 = desc2;
4813 switch(smallVsLargeIndex)
4814 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004815 case 0: desc3 += ",Small"; break;
4816 case 1: desc3 += ",Large"; break;
4817 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004818 default: assert(0);
4819 }
4820
4821 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4822 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4823 else
4824 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4825
4826 // 0 = varying sizes min...max, 1 = set of constant sizes
4827 size_t constantSizesCount = 1;
4828 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4829 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4830 {
4831 std::string desc4 = desc3;
4832 switch(constantSizesIndex)
4833 {
4834 case 0: desc4 += " Varying_sizes"; break;
4835 case 1: desc4 += " Constant_sizes"; break;
4836 default: assert(0);
4837 }
4838
4839 config.AllocationSizes.clear();
4840 // Buffers present
4841 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4842 {
4843 // Small
4844 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4845 {
4846 // Varying size
4847 if(constantSizesIndex == 0)
4848 config.AllocationSizes.push_back({4, 16, 1024});
4849 // Constant sizes
4850 else
4851 {
4852 config.AllocationSizes.push_back({1, 16, 16});
4853 config.AllocationSizes.push_back({1, 64, 64});
4854 config.AllocationSizes.push_back({1, 256, 256});
4855 config.AllocationSizes.push_back({1, 1024, 1024});
4856 }
4857 }
4858 // Large
4859 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4860 {
4861 // Varying size
4862 if(constantSizesIndex == 0)
4863 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4864 // Constant sizes
4865 else
4866 {
4867 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4868 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4869 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4870 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4871 }
4872 }
4873 }
4874 // Images present
4875 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4876 {
4877 // Small
4878 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4879 {
4880 // Varying size
4881 if(constantSizesIndex == 0)
4882 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4883 // Constant sizes
4884 else
4885 {
4886 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4887 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4888 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4889 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4890 }
4891 }
4892 // Large
4893 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4894 {
4895 // Varying size
4896 if(constantSizesIndex == 0)
4897 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4898 // Constant sizes
4899 else
4900 {
4901 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4902 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4903 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4904 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4905 }
4906 }
4907 }
4908
4909 // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95% allocated up front, each followed by many additional operations.
4910 size_t beginBytesToAllocateCount = 1;
4911 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4912 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4913 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4914 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4915 {
4916 std::string desc5 = desc4;
4917
4918 switch(beginBytesToAllocateIndex)
4919 {
4920 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004921 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004922 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4923 config.AdditionalOperationCount = 0;
4924 break;
4925 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004926 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004927 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4928 config.AdditionalOperationCount = 1024;
4929 break;
4930 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004931 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004932 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4933 config.AdditionalOperationCount = 1024;
4934 break;
4935 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004936 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004937 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4938 config.AdditionalOperationCount = 1024;
4939 break;
4940 default:
4941 assert(0);
4942 }
4943
Adam Sawicki0667e332018-08-24 17:26:44 +02004944 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004945 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004946 std::string desc6 = desc5;
4947 switch(strategyIndex)
4948 {
4949 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004950 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004951 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4952 break;
4953 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004954 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004955 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4956 break;
4957 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004958 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004959 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4960 break;
4961 default:
4962 assert(0);
4963 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004964
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004965 desc6 += ',';
4966 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004967
4968 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004969
4970 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4971 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004972 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004973
4974 Result result{};
4975 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004976 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004977 if(file)
4978 {
4979 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4980 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004981 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004982 }
4983 }
4984 }
4985 }
4986 }
4987 }
4988}
4989
4990static void PerformPoolTests(FILE* file)
4991{
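    // Same idea as PerformMainTests(), but for TestPool_Benchmark(): the matrix
    // covers thread count, buffers vs images, small vs large, varying vs constant
    // sizes and the pool subscription level, with the pool sized for roughly
    // AVG_RESOURCES_PER_POOL resources of average size.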
4992 const size_t AVG_RESOURCES_PER_POOL = 300;
4993
4994 uint32_t repeatCount = 1;
4995 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4996
4997 PoolTestConfig config{};
4998 config.RandSeed = 2346343;
4999 config.FrameCount = 200;
5000 config.ItemsToMakeUnusedPercent = 2;
5001
5002 size_t threadCountCount = 1;
5003 switch(ConfigType)
5004 {
5005 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
5006 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
5007 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
5008 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
5009 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
5010 default: assert(0);
5011 }
5012 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
5013 {
5014 std::string desc1;
5015
5016 switch(threadCountIndex)
5017 {
5018 case 0:
5019 desc1 += "1_thread";
5020 config.ThreadCount = 1;
5021 break;
5022 case 1:
5023 desc1 += "16_threads";
5024 config.ThreadCount = 16;
5025 break;
5026 case 2:
5027 desc1 += "2_threads";
5028 config.ThreadCount = 2;
5029 break;
5030 default:
5031 assert(0);
5032 }
5033
5034 // 0 = buffers, 1 = images, 2 = buffers and images
5035 size_t buffersVsImagesCount = 2;
5036 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
5037 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
5038 {
5039 std::string desc2 = desc1;
5040 switch(buffersVsImagesIndex)
5041 {
5042 case 0: desc2 += " Buffers"; break;
5043 case 1: desc2 += " Images"; break;
5044 case 2: desc2 += " Buffers+Images"; break;
5045 default: assert(0);
5046 }
5047
5048 // 0 = small, 1 = large, 2 = small and large
5049 size_t smallVsLargeCount = 2;
5050 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
5051 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
5052 {
5053 std::string desc3 = desc2;
5054 switch(smallVsLargeIndex)
5055 {
5056 case 0: desc3 += " Small"; break;
5057 case 1: desc3 += " Large"; break;
5058 case 2: desc3 += " Small+Large"; break;
5059 default: assert(0);
5060 }
5061
5062 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5063 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
5064 else
5065 config.PoolSize = 4ull * 1024 * 1024;
5066
5067 // 0 = varying sizes min...max, 1 = set of constant sizes
5068 size_t constantSizesCount = 1;
5069 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
5070 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
5071 {
5072 std::string desc4 = desc3;
5073 switch(constantSizesIndex)
5074 {
5075 case 0: desc4 += " Varying_sizes"; break;
5076 case 1: desc4 += " Constant_sizes"; break;
5077 default: assert(0);
5078 }
5079
5080 config.AllocationSizes.clear();
5081 // Buffers present
5082 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
5083 {
5084 // Small
5085 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5086 {
5087 // Varying size
5088 if(constantSizesIndex == 0)
5089 config.AllocationSizes.push_back({4, 16, 1024});
5090 // Constant sizes
5091 else
5092 {
5093 config.AllocationSizes.push_back({1, 16, 16});
5094 config.AllocationSizes.push_back({1, 64, 64});
5095 config.AllocationSizes.push_back({1, 256, 256});
5096 config.AllocationSizes.push_back({1, 1024, 1024});
5097 }
5098 }
5099 // Large
5100 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5101 {
5102 // Varying size
5103 if(constantSizesIndex == 0)
5104 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5105 // Constant sizes
5106 else
5107 {
5108 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
5109 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
5110 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
5111 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
5112 }
5113 }
5114 }
5115 // Images present
5116 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
5117 {
5118 // Small
5119 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
5120 {
5121 // Varying size
5122 if(constantSizesIndex == 0)
5123 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5124 // Constant sizes
5125 else
5126 {
5127 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
5128 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
5129 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
5130 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
5131 }
5132 }
5133 // Large
5134 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
5135 {
5136 // Varying size
5137 if(constantSizesIndex == 0)
5138 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5139 // Constant sizes
5140 else
5141 {
5142 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
5143 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
5144 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
5145 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
5146 }
5147 }
5148 }
5149
5150 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
5151 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
5152
5153 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
5154 size_t subscriptionModeCount;
5155 switch(ConfigType)
5156 {
5157 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
5158 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
5159 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
5160 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
5161 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
5162 default: assert(0);
5163 }
5164 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
5165 {
5166 std::string desc5 = desc4;
5167
5168 switch(subscriptionModeIndex)
5169 {
5170 case 0:
5171 desc5 += " Subscription_66%";
5172 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
5173 break;
5174 case 1:
5175 desc5 += " Subscription_133%";
5176 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
5177 break;
5178 case 2:
5179 desc5 += " Subscription_100%";
5180 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
5181 break;
5182 case 3:
5183 desc5 += " Subscription_33%";
5184 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
5185 break;
5186 case 4:
5187 desc5 += " Subscription_166%";
5188 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
5189 break;
5190 default:
5191 assert(0);
5192 }
5193
5194 config.TotalItemCount = config.UsedItemCountMax * 5;
5195 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
5196
5197 const char* testDescription = desc5.c_str();
5198
5199 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
5200 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02005201 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005202
5203 PoolTestResult result{};
5204 g_MemoryAliasingWarningEnabled = false;
5205 TestPool_Benchmark(result, config);
5206 g_MemoryAliasingWarningEnabled = true;
5207 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
5208 }
5209 }
5210 }
5211 }
5212 }
5213 }
5214}
5215
Adam Sawickia83793a2018-09-03 13:40:42 +02005216static void BasicTestBuddyAllocator()
5217{
5218 wprintf(L"Basic test buddy allocator\n");
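    // Smoke test for VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT. A buddy allocator
    // manages a block as a binary tree of power-of-two nodes, so allocation sizes
    // are effectively rounded up to powers of two; the mix of 256 KB / 512 KB /
    // 128 KB / 1 B requests below plus an aligned vmaAllocateMemory call is meant
    // to exercise node splitting and the minimum node size.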
5219
5220 RandomNumberGenerator rand{76543};
5221
5222 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5223 sampleBufCreateInfo.size = 1024; // Whatever.
5224 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5225
5226 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5227 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5228
5229 VmaPoolCreateInfo poolCreateInfo = {};
5230 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005231 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005232
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02005233 // Deliberately adding 1023 to test usable size smaller than memory block size.
5234 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02005235 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02005236 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02005237
5238 VmaPool pool = nullptr;
5239 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005240 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005241
5242 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
5243
5244 VmaAllocationCreateInfo allocCreateInfo = {};
5245 allocCreateInfo.pool = pool;
5246
5247 std::vector<BufferInfo> bufInfo;
5248 BufferInfo newBufInfo;
5249 VmaAllocationInfo allocInfo;
5250
5251 bufCreateInfo.size = 1024 * 256;
5252 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5253 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005254 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005255 bufInfo.push_back(newBufInfo);
5256
5257 bufCreateInfo.size = 1024 * 512;
5258 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5259 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005260 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005261 bufInfo.push_back(newBufInfo);
5262
5263 bufCreateInfo.size = 1024 * 128;
5264 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5265 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005266 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005267 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02005268
5269 // Test very small allocation, smaller than minimum node size.
5270 bufCreateInfo.size = 1;
5271 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5272 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005273 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02005274 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02005275
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005276 // Test some small allocation with alignment requirement.
5277 {
5278 VkMemoryRequirements memReq;
5279 memReq.alignment = 256;
5280 memReq.memoryTypeBits = UINT32_MAX;
5281 memReq.size = 32;
5282
5283 newBufInfo.Buffer = VK_NULL_HANDLE;
5284 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5285 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005286 TEST(res == VK_SUCCESS);
5287 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005288 bufInfo.push_back(newBufInfo);
5289 }
5290
5291 //SaveAllocatorStatsToFile(L"TEST.json");
5292
Adam Sawicki21017c62018-09-07 15:26:59 +02005293 VmaPoolStats stats = {};
5294 vmaGetPoolStats(g_hAllocator, pool, &stats);
5295 int DBG = 0; // Set breakpoint here to inspect `stats`.
5296
Adam Sawicki80927152018-09-07 17:27:23 +02005297 // Allocate enough new buffers to surely fall into second block.
5298 for(uint32_t i = 0; i < 32; ++i)
5299 {
5300 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5301 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5302 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005303 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005304 bufInfo.push_back(newBufInfo);
5305 }
5306
5307 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5308
Adam Sawickia83793a2018-09-03 13:40:42 +02005309 // Destroy the buffers in random order.
5310 while(!bufInfo.empty())
5311 {
5312 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5313 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5314 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5315 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5316 }
5317
5318 vmaDestroyPool(g_hAllocator, pool);
5319}
5320
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005321static void BasicTestAllocatePages()
5322{
5323 wprintf(L"Basic test allocate pages\n");
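    // Covers vmaAllocateMemoryPages()/vmaFreeMemoryPages(): many allocations made
    // in a single call. As the checks below assume, a failed call is expected to
    // leave the whole pAllocations array filled with VK_NULL_HANDLE rather than
    // partially allocated, and the dedicated-memory variant gives each allocation
    // its own VkDeviceMemory at offset 0.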
5324
5325 RandomNumberGenerator rand{765461};
5326
5327 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5328 sampleBufCreateInfo.size = 1024; // Whatever.
5329 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5330
5331 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5332 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5333
5334 VmaPoolCreateInfo poolCreateInfo = {};
5335 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005336 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005337
5338 // 1 block of 1 MB.
5339 poolCreateInfo.blockSize = 1024 * 1024;
5340 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5341
5342 // Create pool.
5343 VmaPool pool = nullptr;
5344 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005345 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005346
5347 // Make 100 allocations of 4 KB - they should fit into the pool.
5348 VkMemoryRequirements memReq;
5349 memReq.memoryTypeBits = UINT32_MAX;
5350 memReq.alignment = 4 * 1024;
5351 memReq.size = 4 * 1024;
5352
5353 VmaAllocationCreateInfo allocCreateInfo = {};
5354 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5355 allocCreateInfo.pool = pool;
5356
5357 constexpr uint32_t allocCount = 100;
5358
5359 std::vector<VmaAllocation> alloc{allocCount};
5360 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5361 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005362 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005363 for(uint32_t i = 0; i < allocCount; ++i)
5364 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005365 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005366 allocInfo[i].pMappedData != nullptr &&
5367 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5368 allocInfo[i].memoryType == allocInfo[0].memoryType);
5369 }
5370
5371 // Free the allocations.
5372 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5373 std::fill(alloc.begin(), alloc.end(), nullptr);
5374 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5375
5376 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5377 // Also test optional allocationInfo = null.
5378 memReq.size = 100 * 1024;
5379 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005380 TEST(res != VK_SUCCESS);
5381 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005382
5383 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5384 memReq.size = 4 * 1024;
5385 memReq.alignment = 128 * 1024;
5386 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005387 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005388
5389 // Make 100 dedicated allocations of 4 KB.
5390 memReq.alignment = 4 * 1024;
5391 memReq.size = 4 * 1024;
5392
5393 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5394 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5395 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5396 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005397 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005398 for(uint32_t i = 0; i < allocCount; ++i)
5399 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005400 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005401 allocInfo[i].pMappedData != nullptr &&
5402 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5403 allocInfo[i].offset == 0);
5404 if(i > 0)
5405 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005406 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005407 }
5408 }
5409
5410 // Free the allocations.
5411 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5412 std::fill(alloc.begin(), alloc.end(), nullptr);
5413 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5414
5415 vmaDestroyPool(g_hAllocator, pool);
5416}
5417
Adam Sawickif2975342018-10-16 13:49:02 +02005418// Test the testing environment.
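// Creates a set of GPU-only buffers, fills them with pseudo-random data through
// the upload path and reads the data back for validation - it checks the test
// harness (UploadGpuData/ValidateGpuData) rather than the allocator itself,
// which is why the call in Test() is commented out.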
5419static void TestGpuData()
5420{
5421 RandomNumberGenerator rand = { 53434 };
5422
5423 std::vector<AllocInfo> allocInfo;
5424
5425 for(size_t i = 0; i < 100; ++i)
5426 {
5427 AllocInfo info = {};
5428
5429 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5430 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5431 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5432 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5433 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5434
5435 VmaAllocationCreateInfo allocCreateInfo = {};
5436 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5437
5438 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5439 TEST(res == VK_SUCCESS);
5440
5441 info.m_StartValue = rand.Generate();
5442
5443 allocInfo.push_back(std::move(info));
5444 }
5445
5446 UploadGpuData(allocInfo.data(), allocInfo.size());
5447
5448 ValidateGpuData(allocInfo.data(), allocInfo.size());
5449
5450 DestroyAllAllocations(allocInfo);
5451}
5452
Adam Sawickib8333fb2018-03-13 16:15:53 +01005453void Test()
5454{
5455 wprintf(L"TESTING:\n");
5456
Adam Sawicki48b8a332019-11-02 15:24:33 +01005457 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005458 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005459 ////////////////////////////////////////////////////////////////////////////////
5460 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005461 return;
5462 }
5463
Adam Sawickib8333fb2018-03-13 16:15:53 +01005464 // # Simple tests
5465
5466 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005467 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005468#if VMA_DEBUG_MARGIN
5469 TestDebugMargin();
5470#else
5471 TestPool_SameSize();
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01005472 TestPool_MinBlockCount();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005473 TestHeapSizeLimit();
5474#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005475#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5476 TestAllocationsInitialization();
5477#endif
Adam Sawickiefa88c42019-11-18 16:33:56 +01005478 TestMemoryUsage();
Adam Sawicki40ffe982019-10-11 15:56:02 +02005479 TestBudget();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005480 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02005481 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005482 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005483 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005484 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005485 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005486
Adam Sawicki4338f662018-09-07 14:12:37 +02005487 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005488 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005489
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005490 {
5491 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005492 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005493 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005494 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005495 fclose(file);
5496 }
5497
Adam Sawickib8333fb2018-03-13 16:15:53 +01005498 TestDefragmentationSimple();
5499 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005500 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005501 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005502
5503 // # Detailed tests
5504 FILE* file;
5505 fopen_s(&file, "Results.csv", "w");
5506 assert(file != NULL);
5507
5508 WriteMainTestResultHeader(file);
5509 PerformMainTests(file);
5510 //PerformCustomMainTest(file);
5511
5512 WritePoolTestResultHeader(file);
5513 PerformPoolTests(file);
5514 //PerformCustomPoolTest(file);
5515
5516 fclose(file);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005517
Adam Sawickib8333fb2018-03-13 16:15:53 +01005518 wprintf(L"Done.\n");
5519}
5520
Adam Sawickif1a793c2018-03-13 15:42:22 +01005521#endif // #ifdef _WIN32