//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
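// Illustrative only (not used by any test): a Config for a quick smoke run could be
// filled e.g. like this; all values below are arbitrary examples, not recommended settings.
//     Config config{};
//     config.RandSeed = 123;
//     config.BeginBytesToAllocate = 16ull * 1024 * 1024;
//     config.MaxBytesToAllocate = 64ull * 1024 * 1024;
//     config.AdditionalOperationCount = 1024;
//     config.MemUsageProbability[0] = 1; // Index 0 = VMA_MEMORY_USAGE_GPU_ONLY (see MainTest).
//     config.AllocationSizes.push_back({1, 16, 1024, 0, 0}); // Buffers of 16..1024 bytes.
//     config.ThreadCount = 2;
//     config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
//     config.FreeOrder = FREE_ORDER::FORWARD;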

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

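// RAII helper that measures the time between its construction and destruction and
// accumulates it into the given min/sum/max counters. The typical pattern (see
// AllocationTimeRegisterObj and DeallocationTimeRegisterObj below) is to place it on
// the stack in a small scope around a single vmaCreateBuffer/vmaDestroyBuffer call.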
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

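// Multithreaded stress test: every thread allocates random buffers and images up to
// its share of config.BeginBytesToAllocate, then performs random additional
// allocations and frees (partly on a shared, mutex-protected list of common
// allocations), and finally destroys everything in the configured FREE_ORDER.
// Allocation/deallocation timings and final memory statistics are written to outResult.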
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

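// Writes the allocator's JSON statistics dump (vmaBuildStatsString) to the given file,
// e.g. for later inspection or visualization with the VmaDumpVis tool.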
void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

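// Collection of host-visible, persistently mapped staging buffers (up to 256 MB in
// total) that can be reused between transfers. AcquireBuffer() returns an unused
// buffer of sufficient size or creates a new one; ReleaseAllBuffers() marks all of
// them as unused again. Illustrative usage (see UploadGpuData below for the real one):
//     StagingBufferCollection stagingBufs;
//     VkBuffer buf; void* mapped;
//     if(stagingBufs.AcquireBuffer(size, buf, mapped)) { /* fill mapped, record copy */ }
//     stagingBufs.ReleaseAllBuffers(); // After the copy commands have finished.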
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // If there are unused (but too small) buffers, free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

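// Fills each destination buffer with a deterministic pattern derived from its
// m_StartValue: the pattern is written to mapped staging buffers and copied on the GPU
// inside a single-time command buffer. When the staging budget is exhausted, the
// pending copies are submitted, the staging buffers are released, and recording
// continues. ValidateGpuData() below reads the buffers back the same way and verifies
// the pattern.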
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
        TEST(res == VK_SUCCESS);
    }
}

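// Convenience wrapper around vmaDefragment(): gathers the VmaAllocation handles from
// the given AllocInfo array, runs defragmentation, and recreates/rebinds the Vulkan
// buffer or image of every allocation that was reported as moved.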
static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
    */
    if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001613 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1614 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001615 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001616 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1617 const size_t percentToLeave = 30;
1618 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001619 RandomNumberGenerator rand = { 234522 };
1620
1621 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001622
1623 VmaAllocationCreateInfo allocCreateInfo = {};
1624 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001625 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001626
1627 // Create all intended buffers.
1628 for(size_t i = 0; i < bufCount; ++i)
1629 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001630 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1631
1632 if(rand.Generate() % 100 < percentNonMovable)
1633 {
1634 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1635 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1636 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1637 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1638 }
1639 else
1640 {
1641 // Different usage just to see different color in output from VmaDumpVis.
1642 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1643 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1644 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1645 // And in JSON dump.
1646 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1647 }
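        // pUserData doubles as a movable/non-movable tag here (1 = movable, 2 = non-movable);
        // it is read back via vmaGetAllocationInfo() below when the list of allocations
        // passed to the defragmentation is built.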
1648
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001649 AllocInfo alloc;
1650 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1651 alloc.m_StartValue = rand.Generate();
1652 allocations.push_back(alloc);
1653 }
1654
1655 // Destroy some percentage of them.
1656 {
1657 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1658 for(size_t i = 0; i < buffersToDestroy; ++i)
1659 {
1660 const size_t index = rand.Generate() % allocations.size();
1661 allocations[index].Destroy();
1662 allocations.erase(allocations.begin() + index);
1663 }
1664 }
1665
1666 // Fill them with meaningful data.
1667 UploadGpuData(allocations.data(), allocations.size());
1668
Adam Sawickic6ede152018-11-16 17:04:14 +01001669 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001670 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001671 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001672
1673 // Defragment using GPU only.
1674 {
1675 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001676
Adam Sawickic6ede152018-11-16 17:04:14 +01001677 std::vector<VmaAllocation> allocationPtrs;
1678 std::vector<VkBool32> allocationChanged;
1679 std::vector<size_t> allocationOriginalIndex;
1680
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001681 for(size_t i = 0; i < allocCount; ++i)
1682 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001683 VmaAllocationInfo allocInfo = {};
1684 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1685 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1686 {
1687 allocationPtrs.push_back(allocations[i].m_Allocation);
1688 allocationChanged.push_back(VK_FALSE);
1689 allocationOriginalIndex.push_back(i);
1690 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001691 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001692
1693 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001694
1695 BeginSingleTimeCommands();
1696
1697 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001698 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001699 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001700 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001701 defragInfo.pAllocationsChanged = allocationChanged.data();
1702 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001703 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1704 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1705
1706 VmaDefragmentationStats stats = {};
1707 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
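        // vmaDefragmentationBegin() records the needed copy commands into
        // defragInfo.commandBuffer; they must be submitted and finished executing
        // (EndSingleTimeCommands() here) before vmaDefragmentationEnd() is called.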
1708 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1709 TEST(res >= VK_SUCCESS);
1710
1711 EndSingleTimeCommands();
1712
1713 vmaDefragmentationEnd(g_hAllocator, ctx);
1714
Adam Sawickic6ede152018-11-16 17:04:14 +01001715 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001716 {
1717 if(allocationChanged[i])
1718 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001719 const size_t origAllocIndex = allocationOriginalIndex[i];
1720 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001721 }
1722 }
1723
Adam Sawicki4d844e22019-01-24 16:21:05 +01001724 // If corruption detection is enabled, GPU defragmentation may not work on
1725 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001726 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001727 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1728 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001729 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001730 }
1731
1732 ValidateGpuData(allocations.data(), allocations.size());
1733
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001734 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001735 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001736
1737 // Destroy all remaining buffers.
1738 for(size_t i = allocations.size(); i--; )
1739 {
1740 allocations[i].Destroy();
1741 }
Adam Sawicki05704002018-11-08 16:07:29 +01001742
1743 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001744}
1745
Adam Sawickib8333fb2018-03-13 16:15:53 +01001746static void TestUserData()
1747{
1748 VkResult res;
1749
1750 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1751 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1752 bufCreateInfo.size = 0x10000;
1753
1754 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1755 {
1756 // Opaque pointer
1757 {
1758
1759 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1760 void* pointerToSomething = &res;
1761
1762 VmaAllocationCreateInfo allocCreateInfo = {};
1763 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1764 allocCreateInfo.pUserData = numberAsPointer;
1765 if(testIndex == 1)
1766 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1767
1768 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001770 TEST(res == VK_SUCCESS);
1771            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001772
1773 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001774 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001775
1776 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1777 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001778 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001779
1780 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1781 }
1782
1783 // String
1784 {
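            // With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, pUserData is treated as a
            // null-terminated string and the allocator keeps its own copy, so the caller's
            // buffer can be freed right after creation (hence the delete[] below).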
1785 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1786 const char* name2 = "2";
1787 const size_t name1Len = strlen(name1);
1788
1789 char* name1Buf = new char[name1Len + 1];
1790 strcpy_s(name1Buf, name1Len + 1, name1);
1791
1792 VmaAllocationCreateInfo allocCreateInfo = {};
1793 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1794 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1795 allocCreateInfo.pUserData = name1Buf;
1796 if(testIndex == 1)
1797 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1798
1799 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1800 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001801 TEST(res == VK_SUCCESS);
1802 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1803 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001804
1805 delete[] name1Buf;
1806
1807 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001808 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001809
1810 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1811 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001812 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001813
1814 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1815 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001816 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001817
1818 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1819 }
1820 }
1821}
1822
Adam Sawicki370ab182018-11-08 16:31:00 +01001823static void TestInvalidAllocations()
1824{
1825 VkResult res;
1826
1827 VmaAllocationCreateInfo allocCreateInfo = {};
1828 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1829
1830 // Try to allocate 0 bytes.
1831 {
1832 VkMemoryRequirements memReq = {};
1833 memReq.size = 0; // !!!
1834 memReq.alignment = 4;
1835 memReq.memoryTypeBits = UINT32_MAX;
1836 VmaAllocation alloc = VK_NULL_HANDLE;
1837 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1838 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1839 }
1840
1841 // Try to create buffer with size = 0.
1842 {
1843 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1844 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1845 bufCreateInfo.size = 0; // !!!
1846 VkBuffer buf = VK_NULL_HANDLE;
1847 VmaAllocation alloc = VK_NULL_HANDLE;
1848 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1849 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1850 }
1851
1852 // Try to create image with one dimension = 0.
1853 {
1854        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1855 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1856 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1857 imageCreateInfo.extent.width = 128;
1858 imageCreateInfo.extent.height = 0; // !!!
1859 imageCreateInfo.extent.depth = 1;
1860 imageCreateInfo.mipLevels = 1;
1861 imageCreateInfo.arrayLayers = 1;
1862 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1863 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1864 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1865 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1866 VkImage image = VK_NULL_HANDLE;
1867 VmaAllocation alloc = VK_NULL_HANDLE;
1868 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1869 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1870 }
1871}
1872
Adam Sawickib8333fb2018-03-13 16:15:53 +01001873static void TestMemoryRequirements()
1874{
1875 VkResult res;
1876 VkBuffer buf;
1877 VmaAllocation alloc;
1878 VmaAllocationInfo allocInfo;
1879
1880 const VkPhysicalDeviceMemoryProperties* memProps;
1881 vmaGetMemoryProperties(g_hAllocator, &memProps);
1882
1883 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1884 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1885 bufInfo.size = 128;
1886
1887 VmaAllocationCreateInfo allocCreateInfo = {};
1888
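    // The cases below exercise the different ways to steer memory type selection:
    // usage, requiredFlags, preferredFlags and memoryTypeBits.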
1889 // No requirements.
1890 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001891 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001892 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1893
1894 // Usage.
1895 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1896 allocCreateInfo.requiredFlags = 0;
1897 allocCreateInfo.preferredFlags = 0;
1898 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1899
1900 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001901 TEST(res == VK_SUCCESS);
1902 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001903 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1904
1905 // Required flags, preferred flags.
1906 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1907 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1908 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1909 allocCreateInfo.memoryTypeBits = 0;
1910
1911 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001912 TEST(res == VK_SUCCESS);
1913 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1914 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001915 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1916
1917 // memoryTypeBits.
1918 const uint32_t memType = allocInfo.memoryType;
1919 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1920 allocCreateInfo.requiredFlags = 0;
1921 allocCreateInfo.preferredFlags = 0;
1922 allocCreateInfo.memoryTypeBits = 1u << memType;
1923
1924 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001925 TEST(res == VK_SUCCESS);
1926 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001927 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1928
1929}
1930
1931static void TestBasics()
1932{
1933 VkResult res;
1934
1935 TestMemoryRequirements();
1936
1937 // Lost allocation
1938 {
1939 VmaAllocation alloc = VK_NULL_HANDLE;
1940 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001941 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001942
1943 VmaAllocationInfo allocInfo;
1944 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001945 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1946 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001947
1948 vmaFreeMemory(g_hAllocator, alloc);
1949 }
1950
1951 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1952 {
1953 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1954 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1955 bufCreateInfo.size = 128;
1956
1957 VmaAllocationCreateInfo allocCreateInfo = {};
1958 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1959 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1960
1961 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1962 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001963 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001964
1965 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1966
1967        // Same with DEDICATED_MEMORY.
1968 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1969
1970 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001972
1973 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1974 }
1975
1976 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001977
1978 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001979}
1980
1981void TestHeapSizeLimit()
1982{
1983 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1984 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1985
1986 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1987 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1988 {
1989 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1990 }
1991
1992 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1993 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1994 allocatorCreateInfo.device = g_hDevice;
1995 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
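    // pHeapSizeLimit makes this allocator act as if every memory heap had only
    // HEAP_SIZE_LIMIT (1 GB) available, which makes the out-of-memory case at the
    // end of this test deterministic.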
1996
1997 VmaAllocator hAllocator;
1998 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001999 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002000
2001 struct Item
2002 {
2003 VkBuffer hBuf;
2004 VmaAllocation hAlloc;
2005 };
2006 std::vector<Item> items;
2007
2008 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2009 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2010
2011    // 1. Make two dedicated allocations, each half the size of BLOCK_SIZE.
2012 VmaAllocationInfo ownAllocInfo;
2013 {
2014 VmaAllocationCreateInfo allocCreateInfo = {};
2015 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2016 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2017
2018 bufCreateInfo.size = BLOCK_SIZE / 2;
2019
2020 for(size_t i = 0; i < 2; ++i)
2021 {
2022 Item item;
2023 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002024 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002025 items.push_back(item);
2026 }
2027 }
2028
2029    // Create a pool so that further allocations are forced to come from this same memory type.
2030 VmaPoolCreateInfo poolCreateInfo = {};
2031 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2032 poolCreateInfo.blockSize = BLOCK_SIZE;
2033
2034 VmaPool hPool;
2035 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002036 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002037
2038 // 2. Allocate normal buffers from all the remaining memory.
2039 {
2040 VmaAllocationCreateInfo allocCreateInfo = {};
2041 allocCreateInfo.pool = hPool;
2042
2043 bufCreateInfo.size = BLOCK_SIZE / 2;
2044
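        // The two dedicated allocations above already consume one BLOCK_SIZE worth of the
        // 1 GB limit (8 blocks), so (8 - 1) blocks remain; each block fits two buffers of
        // BLOCK_SIZE / 2, hence the count below.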
2045 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2046 for(size_t i = 0; i < bufCount; ++i)
2047 {
2048 Item item;
2049 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002050 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002051 items.push_back(item);
2052 }
2053 }
2054
2055 // 3. Allocation of one more (even small) buffer should fail.
2056 {
2057 VmaAllocationCreateInfo allocCreateInfo = {};
2058 allocCreateInfo.pool = hPool;
2059
2060 bufCreateInfo.size = 128;
2061
2062 VkBuffer hBuf;
2063 VmaAllocation hAlloc;
2064 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002065 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002066 }
2067
2068 // Destroy everything.
2069 for(size_t i = items.size(); i--; )
2070 {
2071 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2072 }
2073
2074 vmaDestroyPool(hAllocator, hPool);
2075
2076 vmaDestroyAllocator(hAllocator);
2077}
2078
Adam Sawicki212a4a62018-06-14 15:44:45 +02002079#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002080static void TestDebugMargin()
2081{
2082 if(VMA_DEBUG_MARGIN == 0)
2083 {
2084 return;
2085 }
2086
2087 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002088 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002089
2090 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002091 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002092
2093    // Create a few buffers of different sizes.
2094 const size_t BUF_COUNT = 10;
2095 BufferInfo buffers[BUF_COUNT];
2096 VmaAllocationInfo allocInfo[BUF_COUNT];
2097    for(size_t i = 0; i < BUF_COUNT; ++i)
2098 {
2099 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002100 // Last one will be mapped.
2101 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002102
2103 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002104 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002105        // The margin is also preserved at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002106 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002107
2108 if(i == BUF_COUNT - 1)
2109 {
2110 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002111 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002112 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2113 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2114 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002115 }
2116
2117 // Check if their offsets preserve margin between them.
2118 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2119 {
2120 if(lhs.deviceMemory != rhs.deviceMemory)
2121 {
2122 return lhs.deviceMemory < rhs.deviceMemory;
2123 }
2124 return lhs.offset < rhs.offset;
2125 });
2126 for(size_t i = 1; i < BUF_COUNT; ++i)
2127 {
2128 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2129 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002130 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002131 }
2132 }
2133
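    // vmaCheckCorruption() validates the magic values written into the margins (it is
    // only effective when VMA_DEBUG_DETECT_CORRUPTION is enabled and the memory is
    // host-visible); it should return VK_SUCCESS as long as nothing wrote past the end
    // of an allocation.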
Adam Sawicki212a4a62018-06-14 15:44:45 +02002134 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002135 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002136
Adam Sawicki73b16652018-06-11 16:39:25 +02002137 // Destroy all buffers.
2138 for(size_t i = BUF_COUNT; i--; )
2139 {
2140 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2141 }
2142}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002143#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002144
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002145static void TestLinearAllocator()
2146{
2147 wprintf(L"Test linear allocator\n");
2148
2149 RandomNumberGenerator rand{645332};
2150
2151 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2152 sampleBufCreateInfo.size = 1024; // Whatever.
2153 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2154
2155 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2156 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2157
2158 VmaPoolCreateInfo poolCreateInfo = {};
2159 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002160 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002161
Adam Sawickiee082772018-06-20 17:45:49 +02002162 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002163 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2164 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2165
2166 VmaPool pool = nullptr;
2167 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002168 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002169
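    // With VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT and a single block, the pool behaves as a
    // free-at-once arena, a stack, a double stack or a ring buffer depending on the
    // allocation/free pattern; the sub-tests below exercise each of these in turn.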
2170 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2171
2172 VmaAllocationCreateInfo allocCreateInfo = {};
2173 allocCreateInfo.pool = pool;
2174
2175 constexpr size_t maxBufCount = 100;
2176 std::vector<BufferInfo> bufInfo;
2177
2178 constexpr VkDeviceSize bufSizeMin = 16;
2179 constexpr VkDeviceSize bufSizeMax = 1024;
2180 VmaAllocationInfo allocInfo;
2181 VkDeviceSize prevOffset = 0;
2182
2183 // Test one-time free.
2184 for(size_t i = 0; i < 2; ++i)
2185 {
2186        // Allocate a number of buffers of varying size that surely fit into this block.
2187 VkDeviceSize bufSumSize = 0;
2188 for(size_t i = 0; i < maxBufCount; ++i)
2189 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002190 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002191 BufferInfo newBufInfo;
2192 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2193 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002194 TEST(res == VK_SUCCESS);
2195 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002196 bufInfo.push_back(newBufInfo);
2197 prevOffset = allocInfo.offset;
2198 bufSumSize += bufCreateInfo.size;
2199 }
2200
2201 // Validate pool stats.
2202 VmaPoolStats stats;
2203 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002204 TEST(stats.size == poolCreateInfo.blockSize);
2205        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2206 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002207
2208 // Destroy the buffers in random order.
2209 while(!bufInfo.empty())
2210 {
2211 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2212 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2213 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2214 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2215 }
2216 }
2217
2218 // Test stack.
2219 {
2220        // Allocate a number of buffers of varying size that surely fit into this block.
2221 for(size_t i = 0; i < maxBufCount; ++i)
2222 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002223 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002224 BufferInfo newBufInfo;
2225 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2226 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002227 TEST(res == VK_SUCCESS);
2228 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002229 bufInfo.push_back(newBufInfo);
2230 prevOffset = allocInfo.offset;
2231 }
2232
2233        // Destroy a few buffers from the top of the stack.
2234 for(size_t i = 0; i < maxBufCount / 5; ++i)
2235 {
2236 const BufferInfo& currBufInfo = bufInfo.back();
2237 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2238 bufInfo.pop_back();
2239 }
2240
2241 // Create some more
2242 for(size_t i = 0; i < maxBufCount / 5; ++i)
2243 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002244 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002245 BufferInfo newBufInfo;
2246 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2247 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002248 TEST(res == VK_SUCCESS);
2249 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002250 bufInfo.push_back(newBufInfo);
2251 prevOffset = allocInfo.offset;
2252 }
2253
2254 // Destroy the buffers in reverse order.
2255 while(!bufInfo.empty())
2256 {
2257 const BufferInfo& currBufInfo = bufInfo.back();
2258 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2259 bufInfo.pop_back();
2260 }
2261 }
2262
Adam Sawickiee082772018-06-20 17:45:49 +02002263 // Test ring buffer.
2264 {
2265        // Allocate a number of buffers that surely fit into this block.
2266 bufCreateInfo.size = bufSizeMax;
2267 for(size_t i = 0; i < maxBufCount; ++i)
2268 {
2269 BufferInfo newBufInfo;
2270 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2271 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002272 TEST(res == VK_SUCCESS);
2273 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002274 bufInfo.push_back(newBufInfo);
2275 prevOffset = allocInfo.offset;
2276 }
2277
2278        // Free and allocate new buffers enough times to make sure we wrap around at least once.
2279 const size_t buffersPerIter = maxBufCount / 10 - 1;
2280 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2281 for(size_t iter = 0; iter < iterCount; ++iter)
2282 {
2283 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2284 {
2285 const BufferInfo& currBufInfo = bufInfo.front();
2286 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2287 bufInfo.erase(bufInfo.begin());
2288 }
2289 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2290 {
2291 BufferInfo newBufInfo;
2292 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2293 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002294 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002295 bufInfo.push_back(newBufInfo);
2296 }
2297 }
2298
2299        // Allocate buffers until we run out of memory.
2300 uint32_t debugIndex = 0;
2301 while(res == VK_SUCCESS)
2302 {
2303 BufferInfo newBufInfo;
2304 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2305 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2306 if(res == VK_SUCCESS)
2307 {
2308 bufInfo.push_back(newBufInfo);
2309 }
2310 else
2311 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002312 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002313 }
2314 ++debugIndex;
2315 }
2316
2317 // Destroy the buffers in random order.
2318 while(!bufInfo.empty())
2319 {
2320 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2321 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2322 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2323 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2324 }
2325 }
2326
Adam Sawicki680b2252018-08-22 14:47:32 +02002327 // Test double stack.
2328 {
2329        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2330 VkDeviceSize prevOffsetLower = 0;
2331 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2332 for(size_t i = 0; i < maxBufCount; ++i)
2333 {
2334 const bool upperAddress = (i % 2) != 0;
2335 if(upperAddress)
2336 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2337 else
2338 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002339 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002340 BufferInfo newBufInfo;
2341 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2342 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002343 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002344 if(upperAddress)
2345 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002346 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002347 prevOffsetUpper = allocInfo.offset;
2348 }
2349 else
2350 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002351 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002352 prevOffsetLower = allocInfo.offset;
2353 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002354 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002355 bufInfo.push_back(newBufInfo);
2356 }
2357
2358        // Destroy a few buffers from the top of the stack.
2359 for(size_t i = 0; i < maxBufCount / 5; ++i)
2360 {
2361 const BufferInfo& currBufInfo = bufInfo.back();
2362 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2363 bufInfo.pop_back();
2364 }
2365
2366 // Create some more
2367 for(size_t i = 0; i < maxBufCount / 5; ++i)
2368 {
2369 const bool upperAddress = (i % 2) != 0;
2370 if(upperAddress)
2371 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2372 else
2373 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002374 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002375 BufferInfo newBufInfo;
2376 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2377 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002378 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002379 bufInfo.push_back(newBufInfo);
2380 }
2381
2382 // Destroy the buffers in reverse order.
2383 while(!bufInfo.empty())
2384 {
2385 const BufferInfo& currBufInfo = bufInfo.back();
2386 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2387 bufInfo.pop_back();
2388 }
2389
2390        // Create buffers on both sides until we run out of memory.
2391 prevOffsetLower = 0;
2392 prevOffsetUpper = poolCreateInfo.blockSize;
2393 res = VK_SUCCESS;
2394 for(size_t i = 0; res == VK_SUCCESS; ++i)
2395 {
2396 const bool upperAddress = (i % 2) != 0;
2397 if(upperAddress)
2398 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2399 else
2400 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002401 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002402 BufferInfo newBufInfo;
2403 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2404 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2405 if(res == VK_SUCCESS)
2406 {
2407 if(upperAddress)
2408 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002409 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002410 prevOffsetUpper = allocInfo.offset;
2411 }
2412 else
2413 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002414 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002415 prevOffsetLower = allocInfo.offset;
2416 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002417 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002418 bufInfo.push_back(newBufInfo);
2419 }
2420 }
2421
2422 // Destroy the buffers in random order.
2423 while(!bufInfo.empty())
2424 {
2425 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2426 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2427 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2428 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2429 }
2430
2431        // Create buffers on the upper side only, with constant size, until we run out of memory.
2432 prevOffsetUpper = poolCreateInfo.blockSize;
2433 res = VK_SUCCESS;
2434 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2435 bufCreateInfo.size = bufSizeMax;
2436 for(size_t i = 0; res == VK_SUCCESS; ++i)
2437 {
2438 BufferInfo newBufInfo;
2439 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2440 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2441 if(res == VK_SUCCESS)
2442 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002443 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002444 prevOffsetUpper = allocInfo.offset;
2445 bufInfo.push_back(newBufInfo);
2446 }
2447 }
2448
2449 // Destroy the buffers in reverse order.
2450 while(!bufInfo.empty())
2451 {
2452 const BufferInfo& currBufInfo = bufInfo.back();
2453 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2454 bufInfo.pop_back();
2455 }
2456 }
2457
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002458 // Test ring buffer with lost allocations.
2459 {
2460        // Allocate buffers until the pool is full.
2461        // Note the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
2462 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
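        // Allocations made with CAN_BECOME_LOST may be taken over by later allocations that
        // use CAN_MAKE_OTHER_LOST once enough frames (advanced via vmaSetCurrentFrameIndex)
        // have passed; a lost allocation then reports deviceMemory == VK_NULL_HANDLE, which
        // is what the checks further below rely on.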
2463 res = VK_SUCCESS;
2464 for(size_t i = 0; res == VK_SUCCESS; ++i)
2465 {
2466 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2467
Adam Sawickifd366b62019-01-24 15:26:43 +01002468 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002469
2470 BufferInfo newBufInfo;
2471 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2472 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2473 if(res == VK_SUCCESS)
2474 bufInfo.push_back(newBufInfo);
2475 }
2476
2477 // Free first half of it.
2478 {
2479 const size_t buffersToDelete = bufInfo.size() / 2;
2480 for(size_t i = 0; i < buffersToDelete; ++i)
2481 {
2482 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2483 }
2484 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2485 }
2486
2487        // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002488        // This way we make sure the ring buffer wraps around, with its front now in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002489 res = VK_SUCCESS;
2490 for(size_t i = 0; res == VK_SUCCESS; ++i)
2491 {
2492 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2493
Adam Sawickifd366b62019-01-24 15:26:43 +01002494 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002495
2496 BufferInfo newBufInfo;
2497 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2498 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2499 if(res == VK_SUCCESS)
2500 bufInfo.push_back(newBufInfo);
2501 }
2502
2503 VkDeviceSize firstNewOffset;
2504 {
2505 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2506
2507 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2508 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2509 bufCreateInfo.size = bufSizeMax;
2510
2511 BufferInfo newBufInfo;
2512 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2513 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002515 bufInfo.push_back(newBufInfo);
2516 firstNewOffset = allocInfo.offset;
2517
2518 // Make sure at least one buffer from the beginning became lost.
2519 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002520 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002521 }
2522
Adam Sawickifd366b62019-01-24 15:26:43 +01002523#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002524 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2525 size_t newCount = 1;
2526 for(;;)
2527 {
2528 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2529
Adam Sawickifd366b62019-01-24 15:26:43 +01002530 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002531
2532 BufferInfo newBufInfo;
2533 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2534 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002535
Adam Sawickib8d34d52018-10-03 17:41:20 +02002536 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002537 bufInfo.push_back(newBufInfo);
2538 ++newCount;
2539 if(allocInfo.offset < firstNewOffset)
2540 break;
2541 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002542#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002543
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002544 // Delete buffers that are lost.
2545 for(size_t i = bufInfo.size(); i--; )
2546 {
2547 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2548 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2549 {
2550 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2551 bufInfo.erase(bufInfo.begin() + i);
2552 }
2553 }
2554
2555 // Test vmaMakePoolAllocationsLost
2556 {
2557 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2558
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002559 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002560 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002561 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002562
2563 size_t realLostAllocCount = 0;
2564 for(size_t i = 0; i < bufInfo.size(); ++i)
2565 {
2566 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2567 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2568 ++realLostAllocCount;
2569 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002570 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002571 }
2572
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002573 // Destroy all the buffers in forward order.
2574 for(size_t i = 0; i < bufInfo.size(); ++i)
2575 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2576 bufInfo.clear();
2577 }
2578
Adam Sawicki70a683e2018-08-24 15:36:32 +02002579 vmaDestroyPool(g_hAllocator, pool);
2580}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002581
Adam Sawicki70a683e2018-08-24 15:36:32 +02002582static void TestLinearAllocatorMultiBlock()
2583{
2584 wprintf(L"Test linear allocator multi block\n");
2585
2586 RandomNumberGenerator rand{345673};
2587
2588 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2589 sampleBufCreateInfo.size = 1024 * 1024;
2590 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2591
2592 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2593 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2594
2595 VmaPoolCreateInfo poolCreateInfo = {};
2596 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2597 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002598 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002599
2600 VmaPool pool = nullptr;
2601 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002602 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002603
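    // Unlike the previous test, this pool does not restrict maxBlockCount, so the linear
    // algorithm is expected to create a second block once the first one is full and to
    // release blocks again when they become empty.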
2604 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2605
2606 VmaAllocationCreateInfo allocCreateInfo = {};
2607 allocCreateInfo.pool = pool;
2608
2609 std::vector<BufferInfo> bufInfo;
2610 VmaAllocationInfo allocInfo;
2611
2612 // Test one-time free.
2613 {
2614 // Allocate buffers until we move to a second block.
2615 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2616 for(uint32_t i = 0; ; ++i)
2617 {
2618 BufferInfo newBufInfo;
2619 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2620 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002621 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002622 bufInfo.push_back(newBufInfo);
2623 if(lastMem && allocInfo.deviceMemory != lastMem)
2624 {
2625 break;
2626 }
2627 lastMem = allocInfo.deviceMemory;
2628 }
2629
Adam Sawickib8d34d52018-10-03 17:41:20 +02002630 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002631
2632        // Make sure that the pool now has two blocks.
2633 VmaPoolStats poolStats = {};
2634 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002635 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002636
2637 // Destroy all the buffers in random order.
2638 while(!bufInfo.empty())
2639 {
2640 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2641 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2642 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2643 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2644 }
2645
2646        // Make sure that the pool now has at most one block.
2647 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002648 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002649 }
2650
2651 // Test stack.
2652 {
2653 // Allocate buffers until we move to a second block.
2654 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2655 for(uint32_t i = 0; ; ++i)
2656 {
2657 BufferInfo newBufInfo;
2658 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2659 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002660 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002661 bufInfo.push_back(newBufInfo);
2662 if(lastMem && allocInfo.deviceMemory != lastMem)
2663 {
2664 break;
2665 }
2666 lastMem = allocInfo.deviceMemory;
2667 }
2668
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002670
2671        // Add a few more buffers.
2672 for(uint32_t i = 0; i < 5; ++i)
2673 {
2674 BufferInfo newBufInfo;
2675 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2676 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002677 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002678 bufInfo.push_back(newBufInfo);
2679 }
2680
2681        // Make sure that the pool now has two blocks.
2682 VmaPoolStats poolStats = {};
2683 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002684 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002685
2686 // Delete half of buffers, LIFO.
2687 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2688 {
2689 const BufferInfo& currBufInfo = bufInfo.back();
2690 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2691 bufInfo.pop_back();
2692 }
2693
2694 // Add one more buffer.
2695 BufferInfo newBufInfo;
2696 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2697 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002698 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002699 bufInfo.push_back(newBufInfo);
2700
2701        // Make sure that the pool now has one block.
2702 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002703 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002704
2705 // Delete all the remaining buffers, LIFO.
2706 while(!bufInfo.empty())
2707 {
2708 const BufferInfo& currBufInfo = bufInfo.back();
2709 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2710 bufInfo.pop_back();
2711 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002712 }
2713
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002714 vmaDestroyPool(g_hAllocator, pool);
2715}
2716
Adam Sawickifd11d752018-08-22 15:02:10 +02002717static void ManuallyTestLinearAllocator()
2718{
2719 VmaStats origStats;
2720 vmaCalculateStats(g_hAllocator, &origStats);
2721
2722 wprintf(L"Manually test linear allocator\n");
2723
2724 RandomNumberGenerator rand{645332};
2725
2726 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2727 sampleBufCreateInfo.size = 1024; // Whatever.
2728 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2729
2730 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2731 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2732
2733 VmaPoolCreateInfo poolCreateInfo = {};
2734 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002735 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002736
2737 poolCreateInfo.blockSize = 10 * 1024;
2738 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2739 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2740
2741 VmaPool pool = nullptr;
2742 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002743 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002744
2745 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2746
2747 VmaAllocationCreateInfo allocCreateInfo = {};
2748 allocCreateInfo.pool = pool;
2749
2750 std::vector<BufferInfo> bufInfo;
2751 VmaAllocationInfo allocInfo;
2752 BufferInfo newBufInfo;
2753
2754 // Test double stack.
2755 {
2756 /*
2757 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2758 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2759
2760        In total:
2761 1 block allocated
2762 10240 Vulkan bytes
2763 6 new allocations
2764 2256 bytes in allocations
2765 */
2766
2767 bufCreateInfo.size = 32;
2768 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2769 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002770 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002771 bufInfo.push_back(newBufInfo);
2772
2773 bufCreateInfo.size = 1024;
2774 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2775 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002776 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002777 bufInfo.push_back(newBufInfo);
2778
2779 bufCreateInfo.size = 32;
2780 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2781 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002782 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002783 bufInfo.push_back(newBufInfo);
2784
2785 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2786
2787 bufCreateInfo.size = 128;
2788 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2789 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002790 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002791 bufInfo.push_back(newBufInfo);
2792
2793 bufCreateInfo.size = 1024;
2794 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2795 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002796 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002797 bufInfo.push_back(newBufInfo);
2798
2799 bufCreateInfo.size = 16;
2800 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2801 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002802 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002803 bufInfo.push_back(newBufInfo);
2804
2805 VmaStats currStats;
2806 vmaCalculateStats(g_hAllocator, &currStats);
2807 VmaPoolStats poolStats;
2808 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2809
2810 char* statsStr = nullptr;
2811 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2812
2813 // PUT BREAKPOINT HERE TO CHECK.
2814 // Inspect: currStats versus origStats, poolStats, statsStr.
2815 int I = 0;
2816
2817 vmaFreeStatsString(g_hAllocator, statsStr);
2818
2819 // Destroy the buffers in reverse order.
2820 while(!bufInfo.empty())
2821 {
2822 const BufferInfo& currBufInfo = bufInfo.back();
2823 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2824 bufInfo.pop_back();
2825 }
2826 }
2827
2828 vmaDestroyPool(g_hAllocator, pool);
2829}
2830
Adam Sawicki80927152018-09-07 17:27:23 +02002831static void BenchmarkAlgorithmsCase(FILE* file,
2832 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002833 bool empty,
2834 VmaAllocationCreateFlags allocStrategy,
2835 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002836{
2837 RandomNumberGenerator rand{16223};
2838
2839 const VkDeviceSize bufSizeMin = 32;
2840 const VkDeviceSize bufSizeMax = 1024;
2841 const size_t maxBufCapacity = 10000;
2842 const uint32_t iterationCount = 10;
2843
2844 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2845 sampleBufCreateInfo.size = bufSizeMax;
2846 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2847
2848 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2849 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2850
2851 VmaPoolCreateInfo poolCreateInfo = {};
2852 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002853 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002854
2855 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002856 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002857 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2858
2859 VmaPool pool = nullptr;
2860 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002861 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002862
2863 // Buffer created just to get memory requirements. Never bound to any memory.
2864 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2865 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002866 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002867
2868 VkMemoryRequirements memReq = {};
2869 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2870
2871 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2872
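    // The benchmark calls vmaAllocateMemory() with the requirements queried above instead
    // of creating real buffers, so the timings below measure allocator overhead only, not
    // Vulkan object creation.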
2873 VmaAllocationCreateInfo allocCreateInfo = {};
2874 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002875 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002876
2877 VmaAllocation alloc;
2878 std::vector<VmaAllocation> baseAllocations;
2879
2880 if(!empty)
2881 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002882 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002883 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002884 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002885 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002886 // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
2887 // This is theoretically allowed and already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002888 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2889 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002890 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002891 baseAllocations.push_back(alloc);
2892 totalSize += memReq.size;
2893 }
2894
2895 // Delete half of them, choose randomly.
2896 size_t allocsToDelete = baseAllocations.size() / 2;
2897 for(size_t i = 0; i < allocsToDelete; ++i)
2898 {
2899 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2900 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2901 baseAllocations.erase(baseAllocations.begin() + index);
2902 }
2903 }
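    // At this point the pool is either empty or pre-fragmented: roughly 1/3 of the block was filled and half of those allocations were freed at random positions.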
2904
2905 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002906 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002907 std::vector<VmaAllocation> testAllocations;
2908 testAllocations.reserve(allocCount);
2909 duration allocTotalDuration = duration::zero();
2910 duration freeTotalDuration = duration::zero();
2911 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2912 {
2913 // Allocations
2914 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2915 for(size_t i = 0; i < allocCount; ++i)
2916 {
2917 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2918 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002919 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002920 testAllocations.push_back(alloc);
2921 }
2922 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2923
2924 // Deallocations
2925 switch(freeOrder)
2926 {
2927 case FREE_ORDER::FORWARD:
2928 // Leave testAllocations unchanged.
2929 break;
2930 case FREE_ORDER::BACKWARD:
2931 std::reverse(testAllocations.begin(), testAllocations.end());
2932 break;
2933 case FREE_ORDER::RANDOM:
2934 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2935 break;
2936 default: assert(0);
2937 }
2938
2939 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2940 for(size_t i = 0; i < allocCount; ++i)
2941 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2942 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2943
2944 testAllocations.clear();
2945 }
2946
2947 // Delete baseAllocations
2948 while(!baseAllocations.empty())
2949 {
2950 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2951 baseAllocations.pop_back();
2952 }
2953
2954 vmaDestroyPool(g_hAllocator, pool);
2955
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002956 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2957 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2958
Adam Sawicki80927152018-09-07 17:27:23 +02002959 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2960 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002961 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002962 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002963 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002964 allocTotalSeconds,
2965 freeTotalSeconds);
2966
2967 if(file)
2968 {
2969 std::string currTime;
2970 CurrentTimeToStr(currTime);
2971
Adam Sawicki80927152018-09-07 17:27:23 +02002972 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002973 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002974 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002975 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002976 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002977 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2978 allocTotalSeconds,
2979 freeTotalSeconds);
2980 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002981}
2982
Adam Sawicki80927152018-09-07 17:27:23 +02002983static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002984{
Adam Sawicki80927152018-09-07 17:27:23 +02002985 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002986
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002987 if(file)
2988 {
2989 fprintf(file,
2990 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002991 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002992 "Allocation time (s),Deallocation time (s)\n");
2993 }
2994
Adam Sawicki0a607132018-08-24 11:18:41 +02002995 uint32_t freeOrderCount = 1;
2996 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2997 freeOrderCount = 3;
2998 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2999 freeOrderCount = 2;
3000
3001 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003002 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003003
3004 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3005 {
3006 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3007 switch(freeOrderIndex)
3008 {
3009 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3010 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3011 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3012 default: assert(0);
3013 }
3014
3015 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3016 {
Adam Sawicki80927152018-09-07 17:27:23 +02003017 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003018 {
Adam Sawicki80927152018-09-07 17:27:23 +02003019 uint32_t algorithm = 0;
3020 switch(algorithmIndex)
3021 {
3022 case 0:
3023 break;
3024 case 1:
3025 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3026 break;
3027 case 2:
3028 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3029 break;
3030 default:
3031 assert(0);
3032 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003033
Adam Sawicki80927152018-09-07 17:27:23 +02003034 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
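            // Allocation strategies only differentiate the Default algorithm; Linear and Buddy are benchmarked with a single (default) strategy.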
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003035 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3036 {
3037 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003038 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003039 {
3040 switch(allocStrategyIndex)
3041 {
3042 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3043 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3044 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3045 default: assert(0);
3046 }
3047 }
3048
Adam Sawicki80927152018-09-07 17:27:23 +02003049 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003050 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003051 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003052 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003053 strategy,
3054 freeOrder); // freeOrder
3055 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003056 }
3057 }
3058 }
3059}
3060
Adam Sawickib8333fb2018-03-13 16:15:53 +01003061static void TestPool_SameSize()
3062{
3063 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3064 const size_t BUF_COUNT = 100;
3065 VkResult res;
3066
3067 RandomNumberGenerator rand{123};
3068
3069 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3070 bufferInfo.size = BUF_SIZE;
3071 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3072
3073 uint32_t memoryTypeBits = UINT32_MAX;
3074 {
3075 VkBuffer dummyBuffer;
3076 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003077 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003078
3079 VkMemoryRequirements memReq;
3080 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3081 memoryTypeBits = memReq.memoryTypeBits;
3082
3083 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3084 }
3085
3086 VmaAllocationCreateInfo poolAllocInfo = {};
3087 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3088 uint32_t memTypeIndex;
3089 res = vmaFindMemoryTypeIndex(
3090 g_hAllocator,
3091 memoryTypeBits,
3092 &poolAllocInfo,
3093 &memTypeIndex);
3094
3095 VmaPoolCreateInfo poolCreateInfo = {};
3096 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3097 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3098 poolCreateInfo.minBlockCount = 1;
3099 poolCreateInfo.maxBlockCount = 4;
3100 poolCreateInfo.frameInUseCount = 0;
3101
3102 VmaPool pool;
3103 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003104 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003105
3106 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3107
3108 VmaAllocationCreateInfo allocInfo = {};
3109 allocInfo.pool = pool;
3110 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3111 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
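    // With frameInUseCount == 0, any allocation not touched in the current frame may become lost when a new allocation needs its space.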
3112
3113 struct BufItem
3114 {
3115 VkBuffer Buf;
3116 VmaAllocation Alloc;
3117 };
3118 std::vector<BufItem> items;
3119
3120 // Fill entire pool.
3121 for(size_t i = 0; i < BUF_COUNT; ++i)
3122 {
3123 BufItem item;
3124 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003126 items.push_back(item);
3127 }
3128
3129 // Make sure that another allocation would fail.
3130 {
3131 BufItem item;
3132 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003133 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003134 }
3135
3136 // Validate that no buffer is lost. Also check that they are not mapped.
3137 for(size_t i = 0; i < items.size(); ++i)
3138 {
3139 VmaAllocationInfo allocInfo;
3140 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003141 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3142 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003143 }
3144
3145 // Free some percent of random items.
3146 {
3147 const size_t PERCENT_TO_FREE = 10;
3148 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3149 for(size_t i = 0; i < itemsToFree; ++i)
3150 {
3151 size_t index = (size_t)rand.Generate() % items.size();
3152 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3153 items.erase(items.begin() + index);
3154 }
3155 }
3156
3157 // Randomly allocate and free items.
3158 {
3159 const size_t OPERATION_COUNT = BUF_COUNT;
3160 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3161 {
3162 bool allocate = rand.Generate() % 2 != 0;
3163 if(allocate)
3164 {
3165 if(items.size() < BUF_COUNT)
3166 {
3167 BufItem item;
3168 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003169 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003170 items.push_back(item);
3171 }
3172 }
3173 else // Free
3174 {
3175 if(!items.empty())
3176 {
3177 size_t index = (size_t)rand.Generate() % items.size();
3178 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3179 items.erase(items.begin() + index);
3180 }
3181 }
3182 }
3183 }
3184
3185 // Allocate up to maximum.
3186 while(items.size() < BUF_COUNT)
3187 {
3188 BufItem item;
3189 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003190 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003191 items.push_back(item);
3192 }
3193
3194 // Validate that no buffer is lost.
3195 for(size_t i = 0; i < items.size(); ++i)
3196 {
3197 VmaAllocationInfo allocInfo;
3198 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003199 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003200 }
3201
3202 // Next frame.
3203 vmaSetCurrentFrameIndex(g_hAllocator, 2);
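    // The pool is completely full, so the allocations below can only succeed by making the frame-1 allocations lost.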
3204
3205 // Allocate another BUF_COUNT buffers.
3206 for(size_t i = 0; i < BUF_COUNT; ++i)
3207 {
3208 BufItem item;
3209 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003210 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003211 items.push_back(item);
3212 }
3213
3214 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3215 for(size_t i = 0; i < BUF_COUNT; ++i)
3216 {
3217 VmaAllocationInfo allocInfo;
3218 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003219 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003220 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3221 }
3222 items.erase(items.begin(), items.begin() + BUF_COUNT);
3223
3224 // Validate that no buffer is lost.
3225 for(size_t i = 0; i < items.size(); ++i)
3226 {
3227 VmaAllocationInfo allocInfo;
3228 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003229 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003230 }
3231
3232 // Free one item.
3233 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3234 items.pop_back();
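    // Freeing a single buffer from a full pool should leave exactly one unused range of BUF_SIZE bytes, as verified below.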
3235
3236 // Validate statistics.
3237 {
3238 VmaPoolStats poolStats = {};
3239 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003240 TEST(poolStats.allocationCount == items.size());
3241 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3242 TEST(poolStats.unusedRangeCount == 1);
3243 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3244 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003245 }
3246
3247 // Free all remaining items.
3248 for(size_t i = items.size(); i--; )
3249 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3250 items.clear();
3251
3252 // Allocate maximum items again.
3253 for(size_t i = 0; i < BUF_COUNT; ++i)
3254 {
3255 BufItem item;
3256 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003257 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003258 items.push_back(item);
3259 }
3260
3261 // Delete every other item.
3262 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3263 {
3264 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3265 items.erase(items.begin() + i);
3266 }
3267
3268 // Defragment!
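    // Every other 1 MB buffer was freed above, so the four 25 MB blocks are each about half empty; compacting the remaining ~50 MB is expected to leave two blocks completely free.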
3269 {
3270 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3271 for(size_t i = 0; i < items.size(); ++i)
3272 allocationsToDefragment[i] = items[i].Alloc;
3273
3274 VmaDefragmentationStats defragmentationStats;
3275 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003276 TEST(res == VK_SUCCESS);
3277 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003278 }
3279
3280 // Free all remaining items.
3281 for(size_t i = items.size(); i--; )
3282 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3283 items.clear();
3284
3285 ////////////////////////////////////////////////////////////////////////////////
3286 // Test for vmaMakePoolAllocationsLost
3287
3288 // Allocate 4 buffers on frame 10.
3289 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3290 for(size_t i = 0; i < 4; ++i)
3291 {
3292 BufItem item;
3293 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003294 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003295 items.push_back(item);
3296 }
3297
3298 // Touch first 2 of them on frame 11.
3299 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3300 for(size_t i = 0; i < 2; ++i)
3301 {
3302 VmaAllocationInfo allocInfo;
3303 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3304 }
3305
3306 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3307 size_t lostCount = 0xDEADC0DE;
3308 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003309 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003310
3311 // Make another call. Now 0 should be lost.
3312 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003313 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003314
3315 // Make another call, with null count. Should not crash.
3316 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3317
3318 // END: Free all remaining items.
3319 for(size_t i = items.size(); i--; )
3320 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3321
3322 items.clear();
3323
Adam Sawickid2924172018-06-11 12:48:46 +02003324 ////////////////////////////////////////////////////////////////////////////////
3325 // Test for allocation too large for pool
3326
3327 {
3328 VmaAllocationCreateInfo allocCreateInfo = {};
3329 allocCreateInfo.pool = pool;
3330
3331 VkMemoryRequirements memReq;
3332 memReq.memoryTypeBits = UINT32_MAX;
3333 memReq.alignment = 1;
3334 memReq.size = poolCreateInfo.blockSize + 4;
3335
3336 VmaAllocation alloc = nullptr;
3337 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003338 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003339 }
3340
Adam Sawickib8333fb2018-03-13 16:15:53 +01003341 vmaDestroyPool(g_hAllocator, pool);
3342}
3343
Adam Sawickib0c36362018-11-13 16:17:38 +01003344static void TestResize()
3345{
3346 wprintf(L"Testing vmaResizeAllocation...\n");
3347
3348 const VkDeviceSize KILOBYTE = 1024ull;
3349 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3350
3351 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3352 bufCreateInfo.size = 2 * MEGABYTE;
3353 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3354
3355 VmaAllocationCreateInfo allocCreateInfo = {};
3356 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3357
3358 uint32_t memTypeIndex = UINT32_MAX;
3359 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3360
3361 VmaPoolCreateInfo poolCreateInfo = {};
3362 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
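    // Ignoring buffer/image granularity keeps the allocations below tightly packed, so the 2 MB size arithmetic in this test stays exact.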
3363 poolCreateInfo.blockSize = 8 * MEGABYTE;
3364 poolCreateInfo.minBlockCount = 1;
3365 poolCreateInfo.maxBlockCount = 1;
3366 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3367
3368 VmaPool pool;
3369 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3370
3371 allocCreateInfo.pool = pool;
3372
3373 // Fill 8 MB pool with 4 * 2 MB allocations.
3374 VmaAllocation allocs[4] = {};
3375
3376 VkMemoryRequirements memReq = {};
3377 memReq.memoryTypeBits = UINT32_MAX;
3378 memReq.alignment = 4;
3379 memReq.size = bufCreateInfo.size;
3380
3381 VmaAllocationInfo allocInfo = {};
3382
3383 for(uint32_t i = 0; i < 4; ++i)
3384 {
3385 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3386 }
3387
3388 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3389
3390 // Case: Resize to the same size always succeeds.
3391 {
3392 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3393 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3394 TEST(allocInfo.size == 2ull * 1024 * 1024);
3395 }
3396
3397 // Case: Shrink allocation at the end.
3398 {
3399 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3400 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3401 TEST(allocInfo.size == 1ull * 1024 * 1024);
3402 }
3403
3404 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3405
3406 // Case: Shrink allocation before free space.
3407 {
3408 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3409 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3410 TEST(allocInfo.size == 512 * KILOBYTE);
3411 }
3412
3413 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3414
3415 // Case: Shrink allocation before next allocation.
3416 {
3417 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3418 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3419 TEST(allocInfo.size == 1 * MEGABYTE);
3420 }
3421
3422 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3423
3424 // Case: Grow allocation while there is even more space available.
3425 {
3426 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3427 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3428 TEST(allocInfo.size == 1 * MEGABYTE);
3429 }
3430
3431 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3432
3433 // Case: Grow allocation while there is exact amount of free space available.
3434 {
3435 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3436 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3437 TEST(allocInfo.size == 2 * MEGABYTE);
3438 }
3439
3440 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3441
3442 // Case: Fail to grow when there is not enough free space due to next allocation.
3443 {
3444 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3445 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3446 TEST(allocInfo.size == 2 * MEGABYTE);
3447 }
3448
3449 // Case: Fail to grow when there is not enough free space due to end of memory block.
3450 {
3451 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3452 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3453 TEST(allocInfo.size == 1 * MEGABYTE);
3454 }
3455
3456 for(uint32_t i = 4; i--; )
3457 {
3458 vmaFreeMemory(g_hAllocator, allocs[i]);
3459 }
3460
3461 vmaDestroyPool(g_hAllocator, pool);
3462
3463 // Test dedicated allocation
3464 {
3465 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3466 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3467 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3468
3469 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3470 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3471
3472 // Case: Resize to the same size always succeeds.
3473 {
3474 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3475 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3476 TEST(allocInfo.size == 2ull * 1024 * 1024);
3477 }
3478
3479 // Case: Shrinking fails.
3480 {
3481 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3482 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3483 TEST(allocInfo.size == 2ull * 1024 * 1024);
3484 }
3485
3486 // Case: Growing fails.
3487 {
3488 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3489 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3490 TEST(allocInfo.size == 2ull * 1024 * 1024);
3491 }
3492
3493 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3494 }
3495}
3496
Adam Sawickie44c6262018-06-15 14:30:39 +02003497static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3498{
3499 const uint8_t* pBytes = (const uint8_t*)pMemory;
3500 for(size_t i = 0; i < size; ++i)
3501 {
3502 if(pBytes[i] != pattern)
3503 {
3504 return false;
3505 }
3506 }
3507 return true;
3508}
3509
3510static void TestAllocationsInitialization()
3511{
3512 VkResult res;
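    // Note: this test relies on the allocator being compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // enabled (e.g. #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1 before including vk_mem_alloc.h),
    // which fills newly created allocations with 0xDC and destroyed ones with 0xEF.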
3513
3514 const size_t BUF_SIZE = 1024;
3515
3516 // Create pool.
3517
3518 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3519 bufInfo.size = BUF_SIZE;
3520 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3521
3522 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3523 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3524
3525 VmaPoolCreateInfo poolCreateInfo = {};
3526 poolCreateInfo.blockSize = BUF_SIZE * 10;
3527 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3528 poolCreateInfo.maxBlockCount = 1;
3529 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003530 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003531
3532 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3533 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003534 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003535
3536 // Create one persistently mapped buffer to keep memory of this block mapped,
3537 // so that pointer to mapped data will remain (more or less...) valid even
3538 // after destruction of other allocations.
3539
3540 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3541 VkBuffer firstBuf;
3542 VmaAllocation firstAlloc;
3543 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003544 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003545
3546 // Test buffers.
3547
3548 for(uint32_t i = 0; i < 2; ++i)
3549 {
3550 const bool persistentlyMapped = i == 0;
3551 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3552 VkBuffer buf;
3553 VmaAllocation alloc;
3554 VmaAllocationInfo allocInfo;
3555 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003556 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003557
3558 void* pMappedData;
3559 if(!persistentlyMapped)
3560 {
3561 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003562 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003563 }
3564 else
3565 {
3566 pMappedData = allocInfo.pMappedData;
3567 }
3568
3569 // Validate initialized content
3570 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003571 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003572
3573 if(!persistentlyMapped)
3574 {
3575 vmaUnmapMemory(g_hAllocator, alloc);
3576 }
3577
3578 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3579
3580 // Validate freed content
3581 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003582 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003583 }
3584
3585 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3586 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3587}
3588
Adam Sawickib8333fb2018-03-13 16:15:53 +01003589static void TestPool_Benchmark(
3590 PoolTestResult& outResult,
3591 const PoolTestConfig& config)
3592{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003593 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003594
3595 RandomNumberGenerator mainRand{config.RandSeed};
3596
3597 uint32_t allocationSizeProbabilitySum = std::accumulate(
3598 config.AllocationSizes.begin(),
3599 config.AllocationSizes.end(),
3600 0u,
3601 [](uint32_t sum, const AllocationSize& allocSize) {
3602 return sum + allocSize.Probability;
3603 });
3604
3605 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3606 bufferInfo.size = 256; // Whatever.
3607 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3608
3609 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3610 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3611 imageInfo.extent.width = 256; // Whatever.
3612 imageInfo.extent.height = 256; // Whatever.
3613 imageInfo.extent.depth = 1;
3614 imageInfo.mipLevels = 1;
3615 imageInfo.arrayLayers = 1;
3616 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3617 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3618 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3619 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3620 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3621
3622 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3623 {
3624 VkBuffer dummyBuffer;
3625 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003626 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003627
3628 VkMemoryRequirements memReq;
3629 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3630 bufferMemoryTypeBits = memReq.memoryTypeBits;
3631
3632 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3633 }
3634
3635 uint32_t imageMemoryTypeBits = UINT32_MAX;
3636 {
3637 VkImage dummyImage;
3638 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003639 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003640
3641 VkMemoryRequirements memReq;
3642 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3643 imageMemoryTypeBits = memReq.memoryTypeBits;
3644
3645 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3646 }
3647
3648 uint32_t memoryTypeBits = 0;
3649 if(config.UsesBuffers() && config.UsesImages())
3650 {
3651 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3652 if(memoryTypeBits == 0)
3653 {
3654 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3655 return;
3656 }
3657 }
3658 else if(config.UsesBuffers())
3659 memoryTypeBits = bufferMemoryTypeBits;
3660 else if(config.UsesImages())
3661 memoryTypeBits = imageMemoryTypeBits;
3662 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003663 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003664
3665 VmaPoolCreateInfo poolCreateInfo = {};
3666 poolCreateInfo.memoryTypeIndex = 0;
3667 poolCreateInfo.minBlockCount = 1;
3668 poolCreateInfo.maxBlockCount = 1;
3669 poolCreateInfo.blockSize = config.PoolSize;
3670 poolCreateInfo.frameInUseCount = 1;
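    // frameInUseCount == 1: allocations used in the current or previous frame are protected from becoming lost.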
3671
3672 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3673 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3674 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3675
3676 VmaPool pool;
3677 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003678 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003679
3680 // Start time measurement - after creating pool and initializing data structures.
3681 time_point timeBeg = std::chrono::high_resolution_clock::now();
3682
3683 ////////////////////////////////////////////////////////////////////////////////
3684 // ThreadProc
3685 auto ThreadProc = [&](
3686 PoolTestThreadResult* outThreadResult,
3687 uint32_t randSeed,
3688 HANDLE frameStartEvent,
3689 HANDLE frameEndEvent) -> void
3690 {
3691 RandomNumberGenerator threadRand{randSeed};
3692
3693 outThreadResult->AllocationTimeMin = duration::max();
3694 outThreadResult->AllocationTimeSum = duration::zero();
3695 outThreadResult->AllocationTimeMax = duration::min();
3696 outThreadResult->DeallocationTimeMin = duration::max();
3697 outThreadResult->DeallocationTimeSum = duration::zero();
3698 outThreadResult->DeallocationTimeMax = duration::min();
3699 outThreadResult->AllocationCount = 0;
3700 outThreadResult->DeallocationCount = 0;
3701 outThreadResult->LostAllocationCount = 0;
3702 outThreadResult->LostAllocationTotalSize = 0;
3703 outThreadResult->FailedAllocationCount = 0;
3704 outThreadResult->FailedAllocationTotalSize = 0;
3705
3706 struct Item
3707 {
3708 VkDeviceSize BufferSize;
3709 VkExtent2D ImageSize;
3710 VkBuffer Buf;
3711 VkImage Image;
3712 VmaAllocation Alloc;
3713
3714 VkDeviceSize CalcSizeBytes() const
3715 {
3716 return BufferSize +
3717 ImageSize.width * ImageSize.height * 4;
3718 }
3719 };
3720 std::vector<Item> unusedItems, usedItems;
3721
3722 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3723
3724 // Create all items - all unused, not yet allocated.
3725 for(size_t i = 0; i < threadTotalItemCount; ++i)
3726 {
3727 Item item = {};
3728
3729 uint32_t allocSizeIndex = 0;
3730 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3731 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3732 r -= config.AllocationSizes[allocSizeIndex++].Probability;
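            // Weighted random choice: each AllocationSize entry is picked with probability proportional to its Probability field.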
3733
3734 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3735 if(allocSize.BufferSizeMax > 0)
3736 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003737 TEST(allocSize.BufferSizeMin > 0);
3738 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003739 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3740 item.BufferSize = allocSize.BufferSizeMin;
3741 else
3742 {
3743 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
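                    // Round the randomized size down to a multiple of 16 B.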
3744 item.BufferSize = item.BufferSize / 16 * 16;
3745 }
3746 }
3747 else
3748 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003749 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003750 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3751 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3752 else
3753 {
3754 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3755 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3756 }
3757 }
3758
3759 unusedItems.push_back(item);
3760 }
3761
3762 auto Allocate = [&](Item& item) -> VkResult
3763 {
3764 VmaAllocationCreateInfo allocCreateInfo = {};
3765 allocCreateInfo.pool = pool;
3766 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3767 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3768
3769 if(item.BufferSize)
3770 {
3771 bufferInfo.size = item.BufferSize;
3772 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3773 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3774 }
3775 else
3776 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003777 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003778
3779 imageInfo.extent.width = item.ImageSize.width;
3780 imageInfo.extent.height = item.ImageSize.height;
3781 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3782 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3783 }
3784 };
3785
3786 ////////////////////////////////////////////////////////////////////////////////
3787 // Frames
3788 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3789 {
3790 WaitForSingleObject(frameStartEvent, INFINITE);
3791
3792 // Always make some percent of used bufs unused, to choose different used ones.
3793 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3794 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3795 {
3796 size_t index = threadRand.Generate() % usedItems.size();
3797 unusedItems.push_back(usedItems[index]);
3798 usedItems.erase(usedItems.begin() + index);
3799 }
3800
3801 // Determine which bufs we want to use in this frame.
3802 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3803 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003804 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003805 // Move some used to unused.
3806 while(usedBufCount < usedItems.size())
3807 {
3808 size_t index = threadRand.Generate() % usedItems.size();
3809 unusedItems.push_back(usedItems[index]);
3810 usedItems.erase(usedItems.begin() + index);
3811 }
3812 // Move some unused to used.
3813 while(usedBufCount > usedItems.size())
3814 {
3815 size_t index = threadRand.Generate() % unusedItems.size();
3816 usedItems.push_back(unusedItems[index]);
3817 unusedItems.erase(unusedItems.begin() + index);
3818 }
3819
3820 uint32_t touchExistingCount = 0;
3821 uint32_t touchLostCount = 0;
3822 uint32_t createSucceededCount = 0;
3823 uint32_t createFailedCount = 0;
3824
3825 // Touch all used bufs. If not created or lost, allocate.
3826 for(size_t i = 0; i < usedItems.size(); ++i)
3827 {
3828 Item& item = usedItems[i];
3829 // Not yet created.
3830 if(item.Alloc == VK_NULL_HANDLE)
3831 {
3832 res = Allocate(item);
3833 ++outThreadResult->AllocationCount;
3834 if(res != VK_SUCCESS)
3835 {
3836 item.Alloc = VK_NULL_HANDLE;
3837 item.Buf = VK_NULL_HANDLE;
3838 ++outThreadResult->FailedAllocationCount;
3839 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3840 ++createFailedCount;
3841 }
3842 else
3843 ++createSucceededCount;
3844 }
3845 else
3846 {
3847 // Touch.
3848 VmaAllocationInfo allocInfo;
3849 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3850 // Lost.
3851 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3852 {
3853 ++touchLostCount;
3854
3855 // Destroy.
3856 {
3857 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3858 if(item.Buf)
3859 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3860 else
3861 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3862 ++outThreadResult->DeallocationCount;
3863 }
3864 item.Alloc = VK_NULL_HANDLE;
3865 item.Buf = VK_NULL_HANDLE;
3866
3867 ++outThreadResult->LostAllocationCount;
3868 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3869
3870 // Recreate.
3871 res = Allocate(item);
3872 ++outThreadResult->AllocationCount;
3873 // Creation failed.
3874 if(res != VK_SUCCESS)
3875 {
3876 ++outThreadResult->FailedAllocationCount;
3877 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3878 ++createFailedCount;
3879 }
3880 else
3881 ++createSucceededCount;
3882 }
3883 else
3884 ++touchExistingCount;
3885 }
3886 }
3887
3888 /*
3889 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3890 randSeed, frameIndex,
3891 touchExistingCount, touchLostCount,
3892 createSucceededCount, createFailedCount);
3893 */
3894
3895 SetEvent(frameEndEvent);
3896 }
3897
3898 // Free all remaining items.
3899 for(size_t i = usedItems.size(); i--; )
3900 {
3901 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3902 if(usedItems[i].Buf)
3903 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3904 else
3905 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3906 ++outThreadResult->DeallocationCount;
3907 }
3908 for(size_t i = unusedItems.size(); i--; )
3909 {
3910 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3911 if(unusedItems[i].Buf)
3912 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3913 else
3914 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3915 ++outThreadResult->DeallocationCount;
3916 }
3917 };
3918
3919 // Launch threads.
3920 uint32_t threadRandSeed = mainRand.Generate();
3921 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3922 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3923 std::vector<std::thread> bkgThreads;
3924 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3925 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3926 {
3927 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3928 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3929 bkgThreads.emplace_back(std::bind(
3930 ThreadProc,
3931 &threadResults[threadIndex],
3932 threadRandSeed + threadIndex,
3933 frameStartEvents[threadIndex],
3934 frameEndEvents[threadIndex]));
3935 }
3936
3937 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003938 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003939 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3940 {
3941 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3942 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3943 SetEvent(frameStartEvents[threadIndex]);
3944 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3945 }
3946
3947 // Wait for threads finished
3948 for(size_t i = 0; i < bkgThreads.size(); ++i)
3949 {
3950 bkgThreads[i].join();
3951 CloseHandle(frameEndEvents[i]);
3952 CloseHandle(frameStartEvents[i]);
3953 }
3954 bkgThreads.clear();
3955
3956 // Finish time measurement - before destroying pool.
3957 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3958
3959 vmaDestroyPool(g_hAllocator, pool);
3960
3961 outResult.AllocationTimeMin = duration::max();
3962 outResult.AllocationTimeAvg = duration::zero();
3963 outResult.AllocationTimeMax = duration::min();
3964 outResult.DeallocationTimeMin = duration::max();
3965 outResult.DeallocationTimeAvg = duration::zero();
3966 outResult.DeallocationTimeMax = duration::min();
3967 outResult.LostAllocationCount = 0;
3968 outResult.LostAllocationTotalSize = 0;
3969 outResult.FailedAllocationCount = 0;
3970 outResult.FailedAllocationTotalSize = 0;
3971 size_t allocationCount = 0;
3972 size_t deallocationCount = 0;
3973 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3974 {
3975 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3976 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3977 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3978 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3979 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3980 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3981 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3982 allocationCount += threadResult.AllocationCount;
3983 deallocationCount += threadResult.DeallocationCount;
3984 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3985 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3986 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3987 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3988 }
3989 if(allocationCount)
3990 outResult.AllocationTimeAvg /= allocationCount;
3991 if(deallocationCount)
3992 outResult.DeallocationTimeAvg /= deallocationCount;
3993}
3994
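// Returns true when the host memory ranges [ptr1, ptr1+size1) and [ptr2, ptr2+size2) overlap.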
3995static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3996{
3997 if(ptr1 < ptr2)
3998 return ptr1 + size1 > ptr2;
3999 else if(ptr2 < ptr1)
4000 return ptr2 + size2 > ptr1;
4001 else
4002 return true;
4003}
4004
4005static void TestMapping()
4006{
4007 wprintf(L"Testing mapping...\n");
4008
4009 VkResult res;
4010 uint32_t memTypeIndex = UINT32_MAX;
4011
4012 enum TEST
4013 {
4014 TEST_NORMAL,
4015 TEST_POOL,
4016 TEST_DEDICATED,
4017 TEST_COUNT
4018 };
4019 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4020 {
4021 VmaPool pool = nullptr;
4022 if(testIndex == TEST_POOL)
4023 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004024 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004025 VmaPoolCreateInfo poolInfo = {};
4026 poolInfo.memoryTypeIndex = memTypeIndex;
4027 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004028 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004029 }
4030
4031 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4032 bufInfo.size = 0x10000;
4033 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4034
4035 VmaAllocationCreateInfo allocCreateInfo = {};
4036 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4037 allocCreateInfo.pool = pool;
4038 if(testIndex == TEST_DEDICATED)
4039 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4040
4041 VmaAllocationInfo allocInfo;
4042
4043 // Mapped manually
4044
4045 // Create 2 buffers.
4046 BufferInfo bufferInfos[3];
4047 for(size_t i = 0; i < 2; ++i)
4048 {
4049 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4050 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004051 TEST(res == VK_SUCCESS);
4052 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004053 memTypeIndex = allocInfo.memoryType;
4054 }
4055
4056 // Map buffer 0.
4057 char* data00 = nullptr;
4058 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004059 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004060 data00[0xFFFF] = data00[0];
4061
4062 // Map buffer 0 second time.
4063 char* data01 = nullptr;
4064 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004065 TEST(res == VK_SUCCESS && data01 == data00);
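    // Mapping is reference-counted: mapping the same allocation again returns the same pointer and requires a matching number of unmaps (done below).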
Adam Sawickib8333fb2018-03-13 16:15:53 +01004066
4067 // Map buffer 1.
4068 char* data1 = nullptr;
4069 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004070 TEST(res == VK_SUCCESS && data1 != nullptr);
4071 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004072 data1[0xFFFF] = data1[0];
4073
4074 // Unmap buffer 0 two times.
4075 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4076 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4077 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004078 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004079
4080 // Unmap buffer 1.
4081 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4082 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004083 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004084
4085 // Create 3rd buffer - persistently mapped.
4086 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4087 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4088 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004089 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004090
4091 // Map buffer 2.
4092 char* data2 = nullptr;
4093 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004094 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004095 data2[0xFFFF] = data2[0];
4096
4097 // Unmap buffer 2.
4098 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4099 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004100 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004101
4102 // Destroy all buffers.
4103 for(size_t i = 3; i--; )
4104 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4105
4106 vmaDestroyPool(g_hAllocator, pool);
4107 }
4108}
4109
Adam Sawickidaa6a552019-06-25 15:26:37 +02004110// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
4111static void TestDeviceLocalMapped()
4112{
4113 VkResult res;
4114
4115 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
4116 {
4117 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4118 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4119 bufCreateInfo.size = 4096;
4120
4121 VmaPool pool = VK_NULL_HANDLE;
4122 VmaAllocationCreateInfo allocCreateInfo = {};
4123 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4124 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4125 if(testIndex == 2)
4126 {
4127 VmaPoolCreateInfo poolCreateInfo = {};
4128 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4129 TEST(res == VK_SUCCESS);
4130 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
4131 TEST(res == VK_SUCCESS);
4132 allocCreateInfo.pool = pool;
4133 }
4134 else if(testIndex == 1)
4135 {
4136 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4137 }
4138
4139 VkBuffer buf = VK_NULL_HANDLE;
4140 VmaAllocation alloc = VK_NULL_HANDLE;
4141 VmaAllocationInfo allocInfo = {};
4142 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4143 TEST(res == VK_SUCCESS && alloc);
4144
4145 VkMemoryPropertyFlags memTypeFlags = 0;
4146 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4147 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4148 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
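        // CREATE_MAPPED maps the allocation only when the chosen memory type is HOST_VISIBLE; otherwise pMappedData is expected to stay null.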
4149
4150 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4151 vmaDestroyPool(g_hAllocator, pool);
4152 }
4153}
4154
Adam Sawickib8333fb2018-03-13 16:15:53 +01004155static void TestMappingMultithreaded()
4156{
4157 wprintf(L"Testing mapping multithreaded...\n");
4158
4159 static const uint32_t threadCount = 16;
4160 static const uint32_t bufferCount = 1024;
4161 static const uint32_t threadBufferCount = bufferCount / threadCount;
4162
4163 VkResult res;
4164 volatile uint32_t memTypeIndex = UINT32_MAX;
4165
4166 enum TEST
4167 {
4168 TEST_NORMAL,
4169 TEST_POOL,
4170 TEST_DEDICATED,
4171 TEST_COUNT
4172 };
4173 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4174 {
4175 VmaPool pool = nullptr;
4176 if(testIndex == TEST_POOL)
4177 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004178 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004179 VmaPoolCreateInfo poolInfo = {};
4180 poolInfo.memoryTypeIndex = memTypeIndex;
4181 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004182 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004183 }
4184
4185 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4186 bufCreateInfo.size = 0x10000;
4187 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4188
4189 VmaAllocationCreateInfo allocCreateInfo = {};
4190 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4191 allocCreateInfo.pool = pool;
4192 if(testIndex == TEST_DEDICATED)
4193 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4194
4195 std::thread threads[threadCount];
4196 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4197 {
4198 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4199 // ======== THREAD FUNCTION ========
4200
4201 RandomNumberGenerator rand{threadIndex};
4202
4203 enum class MODE
4204 {
4205 // Don't map this buffer at all.
4206 DONT_MAP,
4207 // Map and quickly unmap.
4208 MAP_FOR_MOMENT,
4209 // Map and unmap before destruction.
4210 MAP_FOR_LONGER,
4211 // Map two times. Quickly unmap, second unmap before destruction.
4212 MAP_TWO_TIMES,
4213 // Create this buffer as persistently mapped.
4214 PERSISTENTLY_MAPPED,
4215 COUNT
4216 };
4217 std::vector<BufferInfo> bufInfos{threadBufferCount};
4218 std::vector<MODE> bufModes{threadBufferCount};
4219
4220 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4221 {
4222 BufferInfo& bufInfo = bufInfos[bufferIndex];
4223 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4224 bufModes[bufferIndex] = mode;
4225
4226 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4227 if(mode == MODE::PERSISTENTLY_MAPPED)
4228 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4229
4230 VmaAllocationInfo allocInfo;
4231 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4232 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004233 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004234
4235 if(memTypeIndex == UINT32_MAX)
4236 memTypeIndex = allocInfo.memoryType;
4237
4238 char* data = nullptr;
4239
4240 if(mode == MODE::PERSISTENTLY_MAPPED)
4241 {
4242 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004243 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004244 }
4245 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4246 mode == MODE::MAP_TWO_TIMES)
4247 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004248 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004249 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004250 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004251
4252 if(mode == MODE::MAP_TWO_TIMES)
4253 {
4254 char* data2 = nullptr;
4255 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004256 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004257 }
4258 }
4259 else if(mode == MODE::DONT_MAP)
4260 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004261 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004262 }
4263 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004264 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004265
4266 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4267 if(data)
4268 data[0xFFFF] = data[0];
4269
4270 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4271 {
4272 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4273
4274 VmaAllocationInfo allocInfo;
4275 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4276 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004277 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004278 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004279 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004280 }
4281
4282 switch(rand.Generate() % 3)
4283 {
4284 case 0: Sleep(0); break; // Yield.
4285 case 1: Sleep(10); break; // 10 ms
4286 // default: No sleep.
4287 }
4288
4289 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4290 if(data)
4291 data[0xFFFF] = data[0];
4292 }
4293
4294 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4295 {
4296 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4297 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4298 {
4299 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4300
4301 VmaAllocationInfo allocInfo;
4302 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004303 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004304 }
4305
4306 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4307 }
4308 });
4309 }
4310
4311 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4312 threads[threadIndex].join();
4313
4314 vmaDestroyPool(g_hAllocator, pool);
4315 }
4316}
4317
4318static void WriteMainTestResultHeader(FILE* file)
4319{
4320 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004321 "Code,Time,"
4322 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004323 "Total Time (us),"
4324 "Allocation Time Min (us),"
4325 "Allocation Time Avg (us),"
4326 "Allocation Time Max (us),"
4327 "Deallocation Time Min (us),"
4328 "Deallocation Time Avg (us),"
4329 "Deallocation Time Max (us),"
4330 "Total Memory Allocated (B),"
4331 "Free Range Size Avg (B),"
4332 "Free Range Size Max (B)\n");
4333}
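
// Note: WriteMainTestResult() below prints only three %s fields (code, time, test
// description), but the description built in PerformMainTests() is itself
// comma-separated, so it expands into the "Threads,...,Free order" columns of the
// header above. An illustrative value (derived from the code below, not measured):
// "16_threads+50%_common,Buffers,Small Varying_sizes,Allocate_50%+Operations,BestFit,FORWARD".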
4334
4335static void WriteMainTestResult(
4336 FILE* file,
4337 const char* codeDescription,
4338 const char* testDescription,
4339 const Config& config, const Result& result)
4340{
4341 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4342 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4343 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4344 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4345 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4346 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4347 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4348
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004349 std::string currTime;
4350 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004351
4352 fprintf(file,
4353 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004354 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4355 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004356 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004357 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004358 totalTimeSeconds * 1e6f,
4359 allocationTimeMinSeconds * 1e6f,
4360 allocationTimeAvgSeconds * 1e6f,
4361 allocationTimeMaxSeconds * 1e6f,
4362 deallocationTimeMinSeconds * 1e6f,
4363 deallocationTimeAvgSeconds * 1e6f,
4364 deallocationTimeMaxSeconds * 1e6f,
4365 result.TotalMemoryAllocated,
4366 result.FreeRangeSizeAvg,
4367 result.FreeRangeSizeMax);
4368}
4369
4370static void WritePoolTestResultHeader(FILE* file)
4371{
4372 fprintf(file,
4373 "Code,Test,Time,"
4374 "Config,"
4375 "Total Time (us),"
4376 "Allocation Time Min (us),"
4377 "Allocation Time Avg (us),"
4378 "Allocation Time Max (us),"
4379 "Deallocation Time Min (us),"
4380 "Deallocation Time Avg (us),"
4381 "Deallocation Time Max (us),"
4382 "Lost Allocation Count,"
4383 "Lost Allocation Total Size (B),"
4384 "Failed Allocation Count,"
4385 "Failed Allocation Total Size (B)\n");
4386}
4387
4388static void WritePoolTestResult(
4389 FILE* file,
4390 const char* codeDescription,
4391 const char* testDescription,
4392 const PoolTestConfig& config,
4393 const PoolTestResult& result)
4394{
4395 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4396 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4397 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4398 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4399 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4400 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4401 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4402
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004403 std::string currTime;
4404 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004405
4406 fprintf(file,
4407 "%s,%s,%s,"
4408 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4409 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4410 // General
4411 codeDescription,
4412 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004413 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004414 // Config
4415 config.ThreadCount,
4416 (unsigned long long)config.PoolSize,
4417 config.FrameCount,
4418 config.TotalItemCount,
4419 config.UsedItemCountMin,
4420 config.UsedItemCountMax,
4421 config.ItemsToMakeUnusedPercent,
4422 // Results
4423 totalTimeSeconds * 1e6f,
4424 allocationTimeMinSeconds * 1e6f,
4425 allocationTimeAvgSeconds * 1e6f,
4426 allocationTimeMaxSeconds * 1e6f,
4427 deallocationTimeMinSeconds * 1e6f,
4428 deallocationTimeAvgSeconds * 1e6f,
4429 deallocationTimeMaxSeconds * 1e6f,
4430 result.LostAllocationCount,
4431 result.LostAllocationTotalSize,
4432 result.FailedAllocationCount,
4433 result.FailedAllocationTotalSize);
4434}
4435
4436static void PerformCustomMainTest(FILE* file)
4437{
4438 Config config{};
4439 config.RandSeed = 65735476;
4440 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4441 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4442 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4443 config.FreeOrder = FREE_ORDER::FORWARD;
4444 config.ThreadCount = 16;
4445 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004446 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004447
4448 // Buffers
4449 //config.AllocationSizes.push_back({4, 16, 1024});
4450 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4451
4452 // Images
4453 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4454 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4455
4456 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4457 config.AdditionalOperationCount = 1024;
4458
4459 Result result{};
4460 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004461 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004462 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4463}
4464
4465static void PerformCustomPoolTest(FILE* file)
4466{
4467 PoolTestConfig config;
4468 config.PoolSize = 100 * 1024 * 1024;
4469 config.RandSeed = 2345764;
4470 config.ThreadCount = 1;
4471 config.FrameCount = 200;
4472 config.ItemsToMakeUnusedPercent = 2;
4473
4474 AllocationSize allocSize = {};
4475 allocSize.BufferSizeMin = 1024;
4476 allocSize.BufferSizeMax = 1024 * 1024;
4477 allocSize.Probability = 1;
4478 config.AllocationSizes.push_back(allocSize);
4479
4480 allocSize.BufferSizeMin = 0;
4481 allocSize.BufferSizeMax = 0;
4482 allocSize.ImageSizeMin = 128;
4483 allocSize.ImageSizeMax = 1024;
4484 allocSize.Probability = 1;
4485 config.AllocationSizes.push_back(allocSize);
4486
4487 config.PoolSize = config.CalcAvgResourceSize() * 200;
4488 config.UsedItemCountMax = 160;
4489 config.TotalItemCount = config.UsedItemCountMax * 10;
4490 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4491
4492 g_MemoryAliasingWarningEnabled = false;
4493 PoolTestResult result = {};
4494 TestPool_Benchmark(result, config);
4495 g_MemoryAliasingWarningEnabled = true;
4496
4497 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4498}
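
// A minimal sketch of how the two custom tests above can be driven, following the
// same pattern Test() uses for the full suites. The helper name and output file
// name are examples only; it is not called anywhere.
static void RunCustomTestsSketch()
{
    FILE* file = nullptr;
    fopen_s(&file, "CustomResults.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformCustomPoolTest(file);

    fclose(file);
}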
4499
Adam Sawickib8333fb2018-03-13 16:15:53 +01004500static void PerformMainTests(FILE* file)
4501{
4502 uint32_t repeatCount = 1;
4503 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4504
4505 Config config{};
4506 config.RandSeed = 65735476;
4507 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4508 config.FreeOrder = FREE_ORDER::FORWARD;
4509
4510 size_t threadCountCount = 1;
4511 switch(ConfigType)
4512 {
4513 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4514 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4515 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4516 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4517 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4518 default: assert(0);
4519 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004520
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004521 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004522
Adam Sawickib8333fb2018-03-13 16:15:53 +01004523 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4524 {
4525 std::string desc1;
4526
4527 switch(threadCountIndex)
4528 {
4529 case 0:
4530 desc1 += "1_thread";
4531 config.ThreadCount = 1;
4532 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4533 break;
4534 case 1:
4535 desc1 += "16_threads+0%_common";
4536 config.ThreadCount = 16;
4537 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4538 break;
4539 case 2:
4540 desc1 += "16_threads+50%_common";
4541 config.ThreadCount = 16;
4542 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4543 break;
4544 case 3:
4545 desc1 += "16_threads+100%_common";
4546 config.ThreadCount = 16;
4547 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4548 break;
4549 case 4:
4550 desc1 += "2_threads+0%_common";
4551 config.ThreadCount = 2;
4552 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4553 break;
4554 case 5:
4555 desc1 += "2_threads+50%_common";
4556 config.ThreadCount = 2;
4557 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4558 break;
4559 case 6:
4560 desc1 += "2_threads+100%_common";
4561 config.ThreadCount = 2;
4562 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4563 break;
4564 default:
4565 assert(0);
4566 }
4567
4568 // 0 = buffers, 1 = images, 2 = buffers and images
4569 size_t buffersVsImagesCount = 2;
4570 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4571 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4572 {
4573 std::string desc2 = desc1;
4574 switch(buffersVsImagesIndex)
4575 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004576 case 0: desc2 += ",Buffers"; break;
4577 case 1: desc2 += ",Images"; break;
4578 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004579 default: assert(0);
4580 }
4581
4582 // 0 = small, 1 = large, 2 = small and large
4583 size_t smallVsLargeCount = 2;
4584 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4585 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4586 {
4587 std::string desc3 = desc2;
4588 switch(smallVsLargeIndex)
4589 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004590 case 0: desc3 += ",Small"; break;
4591 case 1: desc3 += ",Large"; break;
4592 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004593 default: assert(0);
4594 }
4595
4596 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4597 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4598 else
4599 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4600
4601 // 0 = varying sizes min...max, 1 = set of constant sizes
4602 size_t constantSizesCount = 1;
4603 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4604 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4605 {
4606 std::string desc4 = desc3;
4607 switch(constantSizesIndex)
4608 {
4609 case 0: desc4 += " Varying_sizes"; break;
4610 case 1: desc4 += " Constant_sizes"; break;
4611 default: assert(0);
4612 }
4613
4614 config.AllocationSizes.clear();
4615 // Buffers present
4616 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4617 {
4618 // Small
4619 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4620 {
4621 // Varying size
4622 if(constantSizesIndex == 0)
4623 config.AllocationSizes.push_back({4, 16, 1024});
4624 // Constant sizes
4625 else
4626 {
4627 config.AllocationSizes.push_back({1, 16, 16});
4628 config.AllocationSizes.push_back({1, 64, 64});
4629 config.AllocationSizes.push_back({1, 256, 256});
4630 config.AllocationSizes.push_back({1, 1024, 1024});
4631 }
4632 }
4633 // Large
4634 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4635 {
4636 // Varying size
4637 if(constantSizesIndex == 0)
4638 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4639 // Constant sizes
4640 else
4641 {
4642 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4643 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4644 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4645 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4646 }
4647 }
4648 }
4649 // Images present
4650 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4651 {
4652 // Small
4653 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4654 {
4655 // Varying size
4656 if(constantSizesIndex == 0)
4657 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4658 // Constant sizes
4659 else
4660 {
4661 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4662 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4663 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4664 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4665 }
4666 }
4667 // Large
4668 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4669 {
4670 // Varying size
4671 if(constantSizesIndex == 0)
4672 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4673 // Constant sizes
4674 else
4675 {
4676 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4677 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4678 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4679 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4680 }
4681 }
4682 }
4683
4684            // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95% up front, each followed by many additional operations.
4685 size_t beginBytesToAllocateCount = 1;
4686 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4687 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4688 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4689 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4690 {
4691 std::string desc5 = desc4;
4692
4693 switch(beginBytesToAllocateIndex)
4694 {
4695 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004696 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004697 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4698 config.AdditionalOperationCount = 0;
4699 break;
4700 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004701 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004702 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4703 config.AdditionalOperationCount = 1024;
4704 break;
4705 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004706 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004707 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4708 config.AdditionalOperationCount = 1024;
4709 break;
4710 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004711 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004712 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4713 config.AdditionalOperationCount = 1024;
4714 break;
4715 default:
4716 assert(0);
4717 }
4718
Adam Sawicki0667e332018-08-24 17:26:44 +02004719 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004720 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004721 std::string desc6 = desc5;
4722 switch(strategyIndex)
4723 {
4724 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004725 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004726 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4727 break;
4728 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004729 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004730 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4731 break;
4732 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004733 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004734 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4735 break;
4736 default:
4737 assert(0);
4738 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004739
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004740 desc6 += ',';
4741 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004742
4743 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004744
4745 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4746 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004747 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004748
4749 Result result{};
4750 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004751 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004752 if(file)
4753 {
4754 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4755 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004756 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004757 }
4758 }
4759 }
4760 }
4761 }
4762 }
4763}
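
// For reference: with ConfigType == CONFIG_TYPE_SMALL the loops above generate
// 2 (thread counts) * 2 (buffers/images) * 2 (small/large) * 2 (varying/constant)
// * 2 (begin-bytes modes) = 32 configurations per allocation strategy, each run
// repeatCount times. strategyCount comes from GetAllocationStrategyCount(),
// presumably 3 (BestFit/WorstFit/FirstFit) as handled by the switch above, which
// would give 96 configurations in total.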
4764
4765static void PerformPoolTests(FILE* file)
4766{
4767 const size_t AVG_RESOURCES_PER_POOL = 300;
4768
4769 uint32_t repeatCount = 1;
4770 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4771
4772 PoolTestConfig config{};
4773 config.RandSeed = 2346343;
4774 config.FrameCount = 200;
4775 config.ItemsToMakeUnusedPercent = 2;
4776
4777 size_t threadCountCount = 1;
4778 switch(ConfigType)
4779 {
4780 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4781 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4782 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4783 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4784 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4785 default: assert(0);
4786 }
4787 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4788 {
4789 std::string desc1;
4790
4791 switch(threadCountIndex)
4792 {
4793 case 0:
4794 desc1 += "1_thread";
4795 config.ThreadCount = 1;
4796 break;
4797 case 1:
4798 desc1 += "16_threads";
4799 config.ThreadCount = 16;
4800 break;
4801 case 2:
4802 desc1 += "2_threads";
4803 config.ThreadCount = 2;
4804 break;
4805 default:
4806 assert(0);
4807 }
4808
4809 // 0 = buffers, 1 = images, 2 = buffers and images
4810 size_t buffersVsImagesCount = 2;
4811 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4812 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4813 {
4814 std::string desc2 = desc1;
4815 switch(buffersVsImagesIndex)
4816 {
4817 case 0: desc2 += " Buffers"; break;
4818 case 1: desc2 += " Images"; break;
4819 case 2: desc2 += " Buffers+Images"; break;
4820 default: assert(0);
4821 }
4822
4823 // 0 = small, 1 = large, 2 = small and large
4824 size_t smallVsLargeCount = 2;
4825 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4826 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4827 {
4828 std::string desc3 = desc2;
4829 switch(smallVsLargeIndex)
4830 {
4831 case 0: desc3 += " Small"; break;
4832 case 1: desc3 += " Large"; break;
4833 case 2: desc3 += " Small+Large"; break;
4834 default: assert(0);
4835 }
4836
4837 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4838 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4839 else
4840 config.PoolSize = 4ull * 1024 * 1024;
4841
4842 // 0 = varying sizes min...max, 1 = set of constant sizes
4843 size_t constantSizesCount = 1;
4844 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4845 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4846 {
4847 std::string desc4 = desc3;
4848 switch(constantSizesIndex)
4849 {
4850 case 0: desc4 += " Varying_sizes"; break;
4851 case 1: desc4 += " Constant_sizes"; break;
4852 default: assert(0);
4853 }
4854
4855 config.AllocationSizes.clear();
4856 // Buffers present
4857 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4858 {
4859 // Small
4860 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4861 {
4862 // Varying size
4863 if(constantSizesIndex == 0)
4864 config.AllocationSizes.push_back({4, 16, 1024});
4865 // Constant sizes
4866 else
4867 {
4868 config.AllocationSizes.push_back({1, 16, 16});
4869 config.AllocationSizes.push_back({1, 64, 64});
4870 config.AllocationSizes.push_back({1, 256, 256});
4871 config.AllocationSizes.push_back({1, 1024, 1024});
4872 }
4873 }
4874 // Large
4875 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4876 {
4877 // Varying size
4878 if(constantSizesIndex == 0)
4879 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4880 // Constant sizes
4881 else
4882 {
4883 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4884 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4885 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4886 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4887 }
4888 }
4889 }
4890 // Images present
4891 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4892 {
4893 // Small
4894 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4895 {
4896 // Varying size
4897 if(constantSizesIndex == 0)
4898 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4899 // Constant sizes
4900 else
4901 {
4902 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4903 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4904 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4905 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4906 }
4907 }
4908 // Large
4909 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4910 {
4911 // Varying size
4912 if(constantSizesIndex == 0)
4913 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4914 // Constant sizes
4915 else
4916 {
4917 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4918 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4919 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4920 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4921 }
4922 }
4923 }
4924
4925 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4926 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4927
4928                    // Subscription = UsedItemCountMax as a percentage of AVG_RESOURCES_PER_POOL, which the pool is sized for; values above 100% oversubscribe the pool on average. 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%.
4929 size_t subscriptionModeCount;
4930 switch(ConfigType)
4931 {
4932 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4933 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4934 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4935 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4936 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4937 default: assert(0);
4938 }
4939 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4940 {
4941 std::string desc5 = desc4;
4942
4943 switch(subscriptionModeIndex)
4944 {
4945 case 0:
4946 desc5 += " Subscription_66%";
4947 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4948 break;
4949 case 1:
4950 desc5 += " Subscription_133%";
4951 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4952 break;
4953 case 2:
4954 desc5 += " Subscription_100%";
4955 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4956 break;
4957 case 3:
4958 desc5 += " Subscription_33%";
4959 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4960 break;
4961 case 4:
4962 desc5 += " Subscription_166%";
4963 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4964 break;
4965 default:
4966 assert(0);
4967 }
4968
4969 config.TotalItemCount = config.UsedItemCountMax * 5;
4970 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4971
4972 const char* testDescription = desc5.c_str();
4973
4974 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4975 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004976 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004977
4978 PoolTestResult result{};
4979 g_MemoryAliasingWarningEnabled = false;
4980 TestPool_Benchmark(result, config);
4981 g_MemoryAliasingWarningEnabled = true;
4982 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4983 }
4984 }
4985 }
4986 }
4987 }
4988 }
4989}
4990
Adam Sawickia83793a2018-09-03 13:40:42 +02004991static void BasicTestBuddyAllocator()
4992{
4993 wprintf(L"Basic test buddy allocator\n");
4994
4995 RandomNumberGenerator rand{76543};
4996
4997 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4998 sampleBufCreateInfo.size = 1024; // Whatever.
4999 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5000
5001 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5002 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5003
5004 VmaPoolCreateInfo poolCreateInfo = {};
5005 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005006 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005007
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02005008 // Deliberately adding 1023 to test usable size smaller than memory block size.
5009 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02005010 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02005011 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02005012
5013 VmaPool pool = nullptr;
5014 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005015 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005016
5017 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
5018
5019 VmaAllocationCreateInfo allocCreateInfo = {};
5020 allocCreateInfo.pool = pool;
5021
5022 std::vector<BufferInfo> bufInfo;
5023 BufferInfo newBufInfo;
5024 VmaAllocationInfo allocInfo;
5025
5026 bufCreateInfo.size = 1024 * 256;
5027 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5028 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005029 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005030 bufInfo.push_back(newBufInfo);
5031
5032 bufCreateInfo.size = 1024 * 512;
5033 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5034 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005035 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005036 bufInfo.push_back(newBufInfo);
5037
5038 bufCreateInfo.size = 1024 * 128;
5039 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5040 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005041 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02005042 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02005043
5044 // Test very small allocation, smaller than minimum node size.
5045 bufCreateInfo.size = 1;
5046 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5047 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005048 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02005049 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02005050
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005051 // Test some small allocation with alignment requirement.
5052 {
5053 VkMemoryRequirements memReq;
5054 memReq.alignment = 256;
5055 memReq.memoryTypeBits = UINT32_MAX;
5056 memReq.size = 32;
5057
5058 newBufInfo.Buffer = VK_NULL_HANDLE;
5059 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5060 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005061 TEST(res == VK_SUCCESS);
5062 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005063 bufInfo.push_back(newBufInfo);
5064 }
5065
5066 //SaveAllocatorStatsToFile(L"TEST.json");
5067
Adam Sawicki21017c62018-09-07 15:26:59 +02005068 VmaPoolStats stats = {};
5069 vmaGetPoolStats(g_hAllocator, pool, &stats);
5070 int DBG = 0; // Set breakpoint here to inspect `stats`.
5071
Adam Sawicki80927152018-09-07 17:27:23 +02005072 // Allocate enough new buffers to surely fall into second block.
5073 for(uint32_t i = 0; i < 32; ++i)
5074 {
5075 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5076 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5077 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005078 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005079 bufInfo.push_back(newBufInfo);
5080 }
5081
5082 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5083
Adam Sawickia83793a2018-09-03 13:40:42 +02005084 // Destroy the buffers in random order.
5085 while(!bufInfo.empty())
5086 {
5087 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5088 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5089 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5090 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5091 }
5092
5093 vmaDestroyPool(g_hAllocator, pool);
5094}
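
// A minimal sketch (an assumption, not asserted by the test above): with
// VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT the usable size of a block is presumably the
// largest power of two not exceeding blockSize, which is why the test deliberately
// passes 1 MB + 1023 B. The helper below only illustrates that rounding; it is not
// part of the library API and is not called anywhere.
static VkDeviceSize BuddyUsableSizeSketch(VkDeviceSize blockSize)
{
    VkDeviceSize usable = 1;
    while(usable * 2 <= blockSize)
        usable *= 2;
    return usable; // For 1024 * 1024 + 1023 this returns exactly 1024 * 1024.
}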
5095
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005096static void BasicTestAllocatePages()
5097{
5098 wprintf(L"Basic test allocate pages\n");
5099
5100 RandomNumberGenerator rand{765461};
5101
5102 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5103 sampleBufCreateInfo.size = 1024; // Whatever.
5104 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5105
5106 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5107 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5108
5109 VmaPoolCreateInfo poolCreateInfo = {};
5110 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005111 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005112
5113 // 1 block of 1 MB.
5114 poolCreateInfo.blockSize = 1024 * 1024;
5115 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5116
5117 // Create pool.
5118 VmaPool pool = nullptr;
5119 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005120 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005121
5122 // Make 100 allocations of 4 KB - they should fit into the pool.
5123 VkMemoryRequirements memReq;
5124 memReq.memoryTypeBits = UINT32_MAX;
5125 memReq.alignment = 4 * 1024;
5126 memReq.size = 4 * 1024;
5127
5128 VmaAllocationCreateInfo allocCreateInfo = {};
5129 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5130 allocCreateInfo.pool = pool;
5131
5132 constexpr uint32_t allocCount = 100;
5133
5134 std::vector<VmaAllocation> alloc{allocCount};
5135 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5136 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005137 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005138 for(uint32_t i = 0; i < allocCount; ++i)
5139 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005140 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005141 allocInfo[i].pMappedData != nullptr &&
5142 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5143 allocInfo[i].memoryType == allocInfo[0].memoryType);
5144 }
5145
5146 // Free the allocations.
5147 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5148 std::fill(alloc.begin(), alloc.end(), nullptr);
5149 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5150
5151 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5152 // Also test optional allocationInfo = null.
5153 memReq.size = 100 * 1024;
5154 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005155 TEST(res != VK_SUCCESS);
5156 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005157
5158 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5159 memReq.size = 4 * 1024;
5160 memReq.alignment = 128 * 1024;
5161 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005162 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005163
5164 // Make 100 dedicated allocations of 4 KB.
5165 memReq.alignment = 4 * 1024;
5166 memReq.size = 4 * 1024;
5167
5168 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5169 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5170 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5171 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005172 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005173 for(uint32_t i = 0; i < allocCount; ++i)
5174 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005175 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005176 allocInfo[i].pMappedData != nullptr &&
5177 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5178 allocInfo[i].offset == 0);
5179 if(i > 0)
5180 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005181 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005182 }
5183 }
5184
5185 // Free the allocations.
5186 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5187 std::fill(alloc.begin(), alloc.end(), nullptr);
5188 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5189
5190 vmaDestroyPool(g_hAllocator, pool);
5191}
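
// A minimal sketch of what vmaAllocateMemoryPages() batches: a similar result can be
// obtained, call by call, with vmaAllocateMemory()/vmaFreeMemory() in a loop, though
// without the all-or-nothing behavior on failure that the find_if check above relies
// on. The helper name is an example only; parameters mirror the test above.
static void AllocatePagesOneByOneSketch(
    const VkMemoryRequirements& memReq,
    const VmaAllocationCreateInfo& allocCreateInfo,
    uint32_t allocCount,
    std::vector<VmaAllocation>& outAlloc)
{
    outAlloc.resize(allocCount);
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        // Allocate each page individually with the same parameters.
        VmaAllocationInfo info = {};
        VkResult res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &outAlloc[i], &info);
        TEST(res == VK_SUCCESS);
    }
    // Free in reverse order.
    for(uint32_t i = allocCount; i--; )
        vmaFreeMemory(g_hAllocator, outAlloc[i]);
}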
5192
Adam Sawickif2975342018-10-16 13:49:02 +02005193// Test the testing environment.
5194static void TestGpuData()
5195{
5196 RandomNumberGenerator rand = { 53434 };
5197
5198 std::vector<AllocInfo> allocInfo;
5199
5200 for(size_t i = 0; i < 100; ++i)
5201 {
5202 AllocInfo info = {};
5203
5204 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5205 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5206 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5207 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5208 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5209
5210 VmaAllocationCreateInfo allocCreateInfo = {};
5211 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5212
5213 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5214 TEST(res == VK_SUCCESS);
5215
5216 info.m_StartValue = rand.Generate();
5217
5218 allocInfo.push_back(std::move(info));
5219 }
5220
5221 UploadGpuData(allocInfo.data(), allocInfo.size());
5222
5223 ValidateGpuData(allocInfo.data(), allocInfo.size());
5224
5225 DestroyAllAllocations(allocInfo);
5226}
5227
Adam Sawickib8333fb2018-03-13 16:15:53 +01005228void Test()
5229{
5230 wprintf(L"TESTING:\n");
5231
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005232 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005233 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005234 ////////////////////////////////////////////////////////////////////////////////
5235 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005236 return;
5237 }
5238
Adam Sawickib8333fb2018-03-13 16:15:53 +01005239 // # Simple tests
5240
5241 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005242 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005243#if VMA_DEBUG_MARGIN
5244 TestDebugMargin();
5245#else
5246 TestPool_SameSize();
5247 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005248 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005249#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005250#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5251 TestAllocationsInitialization();
5252#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005253 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02005254 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005255 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005256 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005257 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005258 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005259
Adam Sawicki4338f662018-09-07 14:12:37 +02005260 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005261 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005262
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005263 {
5264 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005265 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005266 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005267 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005268 fclose(file);
5269 }
5270
Adam Sawickib8333fb2018-03-13 16:15:53 +01005271 TestDefragmentationSimple();
5272 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005273 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005274 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005275
5276 // # Detailed tests
5277 FILE* file;
5278 fopen_s(&file, "Results.csv", "w");
5279 assert(file != NULL);
5280
5281 WriteMainTestResultHeader(file);
5282 PerformMainTests(file);
5283 //PerformCustomMainTest(file);
5284
5285 WritePoolTestResultHeader(file);
5286 PerformPoolTests(file);
5287 //PerformCustomPoolTest(file);
5288
5289 fclose(file);
5290
5291 wprintf(L"Done.\n");
5292}
5293
Adam Sawickif1a793c2018-03-13 15:42:22 +01005294#endif // #ifdef _WIN32