//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>
#include <numeric> // std::accumulate is used in MainTest()

#ifdef _WIN32

Adam Sawicki33d2ce72018-08-27 13:59:13 +020033static const char* CODE_DESCRIPTION = "Foo";
34
Adam Sawickif2975342018-10-16 13:49:02 +020035extern VkCommandBuffer g_hTemporaryCommandBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +020036extern const VkAllocationCallbacks* g_Allocs;
Adam Sawickif2975342018-10-16 13:49:02 +020037void BeginSingleTimeCommands();
38void EndSingleTimeCommands();
39
Adam Sawickibdb89a92018-12-13 11:56:30 +010040#ifndef VMA_DEBUG_MARGIN
41 #define VMA_DEBUG_MARGIN 0
42#endif
43
Adam Sawicki0a607132018-08-24 11:18:41 +020044enum CONFIG_TYPE {
45 CONFIG_TYPE_MINIMUM,
46 CONFIG_TYPE_SMALL,
47 CONFIG_TYPE_AVERAGE,
48 CONFIG_TYPE_LARGE,
49 CONFIG_TYPE_MAXIMUM,
50 CONFIG_TYPE_COUNT
51};
52
Adam Sawickif2975342018-10-16 13:49:02 +020053static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
54//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020055
Adam Sawickib8333fb2018-03-13 16:15:53 +010056enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
57
Adam Sawicki0667e332018-08-24 17:26:44 +020058static const char* FREE_ORDER_NAMES[] = {
59 "FORWARD",
60 "BACKWARD",
61 "RANDOM",
Adam Sawicki0a607132018-08-24 11:18:41 +020062};
63
Adam Sawicki80927152018-09-07 17:27:23 +020064// Copy of internal VmaAlgorithmToStr.
65static const char* AlgorithmToStr(uint32_t algorithm)
66{
67 switch(algorithm)
68 {
69 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
70 return "Linear";
71 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
72 return "Buddy";
73 case 0:
74 return "Default";
75 default:
76 assert(0);
77 return "";
78 }
79}
80
Adam Sawickib8333fb2018-03-13 16:15:53 +010081struct AllocationSize
82{
83 uint32_t Probability;
84 VkDeviceSize BufferSizeMin, BufferSizeMax;
85 uint32_t ImageSizeMin, ImageSizeMax;
86};
87
88struct Config
89{
90 uint32_t RandSeed;
91 VkDeviceSize BeginBytesToAllocate;
92 uint32_t AdditionalOperationCount;
93 VkDeviceSize MaxBytesToAllocate;
94 uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
95 std::vector<AllocationSize> AllocationSizes;
96 uint32_t ThreadCount;
97 uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
98 FREE_ORDER FreeOrder;
Adam Sawicki0667e332018-08-24 17:26:44 +020099 VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
Adam Sawickib8333fb2018-03-13 16:15:53 +0100100};
101
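// Illustrative sketch (not used by the tests below): how a hypothetical Config for a
// small smoke-test run of MainTest() might be filled in. All values here are
// assumptions chosen for illustration only.
//
//   Config config = {};
//   config.RandSeed = 123;
//   config.BeginBytesToAllocate = 16ull * 1024 * 1024;        // allocate 16 MB up front
//   config.MaxBytesToAllocate   = 64ull * 1024 * 1024;        // never exceed 64 MB per thread set
//   config.AdditionalOperationCount = 1024;                   // extra random alloc/free ops
//   config.MemUsageProbability[0] = 1;                        // only VMA_MEMORY_USAGE_GPU_ONLY
//   config.AllocationSizes.push_back({1, 1024, 65536, 0, 0}); // buffers of 1 KB .. 64 KB
//   config.ThreadCount = 2;
//   config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
//   config.FreeOrder = FREE_ORDER::RANDOM;
//
//   Result result;
//   MainTest(result, config);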
102struct Result
103{
104 duration TotalTime;
105 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
106 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
107 VkDeviceSize TotalMemoryAllocated;
108 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
109};
110
111void TestDefragmentationSimple();
112void TestDefragmentationFull();
113
114struct PoolTestConfig
115{
116 uint32_t RandSeed;
117 uint32_t ThreadCount;
118 VkDeviceSize PoolSize;
119 uint32_t FrameCount;
120 uint32_t TotalItemCount;
    // Range for the number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused each frame (some other items may become used in their place).
    uint32_t ItemsToMakeUnusedPercent;
125 std::vector<AllocationSize> AllocationSizes;
126
127 VkDeviceSize CalcAvgResourceSize() const
128 {
129 uint32_t probabilitySum = 0;
130 VkDeviceSize sizeSum = 0;
131 for(size_t i = 0; i < AllocationSizes.size(); ++i)
132 {
133 const AllocationSize& allocSize = AllocationSizes[i];
134 if(allocSize.BufferSizeMax > 0)
135 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
136 else
137 {
138 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
139 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
140 }
141 probabilitySum += allocSize.Probability;
142 }
143 return sizeSum / probabilitySum;
144 }
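    // Worked example of the weighted average above (illustrative numbers): with entries
    // { Probability = 3, BufferSizeMin = 1024, BufferSizeMax = 3072 } and
    // { Probability = 1, ImageSizeMin = ImageSizeMax = 256 }, the weighted sum is
    // 3 * 2048 + 1 * (256 * 256 * 4) = 6144 + 262144 = 268288 bytes, divided by the
    // probability sum 4 gives ~65.5 KB per resource (images approximated as w * h * 4 B).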
145
146 bool UsesBuffers() const
147 {
148 for(size_t i = 0; i < AllocationSizes.size(); ++i)
149 if(AllocationSizes[i].BufferSizeMax > 0)
150 return true;
151 return false;
152 }
153
154 bool UsesImages() const
155 {
156 for(size_t i = 0; i < AllocationSizes.size(); ++i)
157 if(AllocationSizes[i].ImageSizeMax > 0)
158 return true;
159 return false;
160 }
161};
162
163struct PoolTestResult
164{
165 duration TotalTime;
166 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
167 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
168 size_t LostAllocationCount, LostAllocationTotalSize;
169 size_t FailedAllocationCount, FailedAllocationTotalSize;
170};
171
172static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
173
Adam Sawicki51fa9662018-10-03 13:44:29 +0200174uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200175
Adam Sawickib8333fb2018-03-13 16:15:53 +0100176struct BufferInfo
177{
178 VkBuffer Buffer = VK_NULL_HANDLE;
179 VmaAllocation Allocation = VK_NULL_HANDLE;
180};
181
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200182static uint32_t GetAllocationStrategyCount()
183{
184 uint32_t strategyCount = 0;
185 switch(ConfigType)
186 {
187 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
188 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
189 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
190 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
191 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
192 default: assert(0);
193 }
194 return strategyCount;
195}
196
197static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
198{
199 switch(allocStrategy)
200 {
201 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
202 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
203 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
204 case 0: return "Default"; break;
205 default: assert(0); return "";
206 }
207}
208
Adam Sawickib8333fb2018-03-13 16:15:53 +0100209static void InitResult(Result& outResult)
210{
211 outResult.TotalTime = duration::zero();
212 outResult.AllocationTimeMin = duration::max();
213 outResult.AllocationTimeAvg = duration::zero();
214 outResult.AllocationTimeMax = duration::min();
215 outResult.DeallocationTimeMin = duration::max();
216 outResult.DeallocationTimeAvg = duration::zero();
217 outResult.DeallocationTimeMax = duration::min();
218 outResult.TotalMemoryAllocated = 0;
219 outResult.FreeRangeSizeAvg = 0;
220 outResult.FreeRangeSizeMax = 0;
221}
222
223class TimeRegisterObj
224{
225public:
226 TimeRegisterObj(duration& min, duration& sum, duration& max) :
227 m_Min(min),
228 m_Sum(sum),
229 m_Max(max),
230 m_TimeBeg(std::chrono::high_resolution_clock::now())
231 {
232 }
233
234 ~TimeRegisterObj()
235 {
236 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
237 m_Sum += d;
238 if(d < m_Min) m_Min = d;
239 if(d > m_Max) m_Max = d;
240 }
241
242private:
243 duration& m_Min;
244 duration& m_Sum;
245 duration& m_Max;
246 time_point m_TimeBeg;
247};
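// TimeRegisterObj measures one scoped operation via RAII: the constructor records the
// start time and the destructor adds the elapsed time to the running sum and updates
// min/max. A minimal usage sketch (variable names here are hypothetical):
//
//   duration minT = duration::max(), sumT = duration::zero(), maxT = duration::min();
//   {
//       TimeRegisterObj timer(minT, sumT, maxT);
//       // ... timed work ...
//   } // elapsed time is accumulated when `timer` goes out of scope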
248
249struct PoolTestThreadResult
250{
251 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
252 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
253 size_t AllocationCount, DeallocationCount;
254 size_t LostAllocationCount, LostAllocationTotalSize;
255 size_t FailedAllocationCount, FailedAllocationTotalSize;
256};
257
258class AllocationTimeRegisterObj : public TimeRegisterObj
259{
260public:
261 AllocationTimeRegisterObj(Result& result) :
262 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
263 {
264 }
265};
266
267class DeallocationTimeRegisterObj : public TimeRegisterObj
268{
269public:
270 DeallocationTimeRegisterObj(Result& result) :
271 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
272 {
273 }
274};
275
276class PoolAllocationTimeRegisterObj : public TimeRegisterObj
277{
278public:
279 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
280 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
281 {
282 }
283};
284
285class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
286{
287public:
288 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
289 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
290 {
291 }
292};
293
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200294static void CurrentTimeToStr(std::string& out)
295{
296 time_t rawTime; time(&rawTime);
297 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
298 char timeStr[128];
299 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
300 out = timeStr;
301}
302
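// MainTest: multi-threaded stress test. Each thread first allocates buffers/images up to
// its share of config.BeginBytesToAllocate, then performs its share of
// config.AdditionalOperationCount random allocations and frees (bounded by
// config.MaxBytesToAllocate), and finally frees everything in the configured FreeOrder.
// A configurable percentage of allocations goes into a list shared between threads.
// Allocation/deallocation times and final memory statistics are returned in outResult.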
Adam Sawickib8333fb2018-03-13 16:15:53 +0100303VkResult MainTest(Result& outResult, const Config& config)
304{
305 assert(config.ThreadCount > 0);
306
307 InitResult(outResult);
308
309 RandomNumberGenerator mainRand{config.RandSeed};
310
311 time_point timeBeg = std::chrono::high_resolution_clock::now();
312
313 std::atomic<size_t> allocationCount = 0;
314 VkResult res = VK_SUCCESS;
315
316 uint32_t memUsageProbabilitySum =
317 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
318 config.MemUsageProbability[2] + config.MemUsageProbability[3];
319 assert(memUsageProbabilitySum > 0);
320
321 uint32_t allocationSizeProbabilitySum = std::accumulate(
322 config.AllocationSizes.begin(),
323 config.AllocationSizes.end(),
324 0u,
325 [](uint32_t sum, const AllocationSize& allocSize) {
326 return sum + allocSize.Probability;
327 });
328
329 struct Allocation
330 {
331 VkBuffer Buffer;
332 VkImage Image;
333 VmaAllocation Alloc;
334 };
335
336 std::vector<Allocation> commonAllocations;
337 std::mutex commonAllocationsMutex;
338
339 auto Allocate = [&](
340 VkDeviceSize bufferSize,
341 const VkExtent2D imageExtent,
342 RandomNumberGenerator& localRand,
343 VkDeviceSize& totalAllocatedBytes,
344 std::vector<Allocation>& allocations) -> VkResult
345 {
346 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
347
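        // Roulette-wheel selection: pick a memory usage index with probability
        // proportional to config.MemUsageProbability[memUsageIndex].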
348 uint32_t memUsageIndex = 0;
349 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
350 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
351 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
352
353 VmaAllocationCreateInfo memReq = {};
354 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200355 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100356
357 Allocation allocation = {};
358 VmaAllocationInfo allocationInfo;
359
360 // Buffer
361 if(bufferSize > 0)
362 {
363 assert(imageExtent.width == 0);
364 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
365 bufferInfo.size = bufferSize;
366 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
367
368 {
369 AllocationTimeRegisterObj timeRegisterObj{outResult};
370 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
371 }
372 }
373 // Image
374 else
375 {
376 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
377 imageInfo.imageType = VK_IMAGE_TYPE_2D;
378 imageInfo.extent.width = imageExtent.width;
379 imageInfo.extent.height = imageExtent.height;
380 imageInfo.extent.depth = 1;
381 imageInfo.mipLevels = 1;
382 imageInfo.arrayLayers = 1;
383 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
384 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
385 VK_IMAGE_TILING_OPTIMAL :
386 VK_IMAGE_TILING_LINEAR;
387 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
388 switch(memReq.usage)
389 {
390 case VMA_MEMORY_USAGE_GPU_ONLY:
391 switch(localRand.Generate() % 3)
392 {
393 case 0:
394 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
395 break;
396 case 1:
397 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
398 break;
399 case 2:
400 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
401 break;
402 }
403 break;
404 case VMA_MEMORY_USAGE_CPU_ONLY:
405 case VMA_MEMORY_USAGE_CPU_TO_GPU:
406 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
407 break;
408 case VMA_MEMORY_USAGE_GPU_TO_CPU:
409 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
410 break;
411 }
412 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
413 imageInfo.flags = 0;
414
415 {
416 AllocationTimeRegisterObj timeRegisterObj{outResult};
417 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
418 }
419 }
420
421 if(res == VK_SUCCESS)
422 {
423 ++allocationCount;
424 totalAllocatedBytes += allocationInfo.size;
425 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
426 if(useCommonAllocations)
427 {
428 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
429 commonAllocations.push_back(allocation);
430 }
431 else
432 allocations.push_back(allocation);
433 }
434 else
435 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200436 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100437 }
438 return res;
439 };
440
441 auto GetNextAllocationSize = [&](
442 VkDeviceSize& outBufSize,
443 VkExtent2D& outImageSize,
444 RandomNumberGenerator& localRand)
445 {
446 outBufSize = 0;
447 outImageSize = {0, 0};
448
449 uint32_t allocSizeIndex = 0;
450 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
451 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
452 r -= config.AllocationSizes[allocSizeIndex++].Probability;
453
454 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
455 if(allocSize.BufferSizeMax > 0)
456 {
457 assert(allocSize.ImageSizeMax == 0);
458 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
459 outBufSize = allocSize.BufferSizeMin;
460 else
461 {
462 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
463 outBufSize = outBufSize / 16 * 16;
464 }
465 }
466 else
467 {
468 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
469 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
470 else
471 {
472 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
473 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
474 }
475 }
476 };
477
478 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
479 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
480
481 auto ThreadProc = [&](uint32_t randSeed) -> void
482 {
483 RandomNumberGenerator threadRand(randSeed);
484 VkDeviceSize threadTotalAllocatedBytes = 0;
485 std::vector<Allocation> threadAllocations;
486 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
487 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
488 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
489
490 // BEGIN ALLOCATIONS
491 for(;;)
492 {
493 VkDeviceSize bufferSize = 0;
494 VkExtent2D imageExtent = {};
495 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
496 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
497 threadBeginBytesToAllocate)
498 {
499 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
500 break;
501 }
502 else
503 break;
504 }
505
506 // ADDITIONAL ALLOCATIONS AND FREES
507 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
508 {
509 VkDeviceSize bufferSize = 0;
510 VkExtent2D imageExtent = {};
511 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
512
513 // true = allocate, false = free
514 bool allocate = threadRand.Generate() % 2 != 0;
515
516 if(allocate)
517 {
518 if(threadTotalAllocatedBytes +
519 bufferSize +
520 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
521 threadMaxBytesToAllocate)
522 {
523 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
524 break;
525 }
526 }
527 else
528 {
529 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
530 if(useCommonAllocations)
531 {
532 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
533 if(!commonAllocations.empty())
534 {
535 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
536 VmaAllocationInfo allocationInfo;
537 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
538 if(threadTotalAllocatedBytes >= allocationInfo.size)
539 {
540 DeallocationTimeRegisterObj timeRegisterObj{outResult};
541 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
542 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
543 else
544 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
545 threadTotalAllocatedBytes -= allocationInfo.size;
546 commonAllocations.erase(commonAllocations.begin() + indexToFree);
547 }
548 }
549 }
550 else
551 {
552 if(!threadAllocations.empty())
553 {
554 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
555 VmaAllocationInfo allocationInfo;
556 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
557 if(threadTotalAllocatedBytes >= allocationInfo.size)
558 {
559 DeallocationTimeRegisterObj timeRegisterObj{outResult};
560 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
561 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
562 else
563 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
564 threadTotalAllocatedBytes -= allocationInfo.size;
565 threadAllocations.erase(threadAllocations.begin() + indexToFree);
566 }
567 }
568 }
569 }
570 }
571
572 ++numThreadsReachedMaxAllocations;
573
574 WaitForSingleObject(threadsFinishEvent, INFINITE);
575
576 // DEALLOCATION
577 while(!threadAllocations.empty())
578 {
579 size_t indexToFree = 0;
580 switch(config.FreeOrder)
581 {
582 case FREE_ORDER::FORWARD:
583 indexToFree = 0;
584 break;
585 case FREE_ORDER::BACKWARD:
586 indexToFree = threadAllocations.size() - 1;
587 break;
588 case FREE_ORDER::RANDOM:
589 indexToFree = mainRand.Generate() % threadAllocations.size();
590 break;
591 }
592
593 {
594 DeallocationTimeRegisterObj timeRegisterObj{outResult};
595 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
596 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
597 else
598 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
599 }
600 threadAllocations.erase(threadAllocations.begin() + indexToFree);
601 }
602 };
603
604 uint32_t threadRandSeed = mainRand.Generate();
605 std::vector<std::thread> bkgThreads;
606 for(size_t i = 0; i < config.ThreadCount; ++i)
607 {
608 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
609 }
610
    // Wait until all threads have reached their maximum allocations
612 while(numThreadsReachedMaxAllocations < config.ThreadCount)
613 Sleep(0);
614
615 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
616 VmaStats vmaStats = {};
617 vmaCalculateStats(g_hAllocator, &vmaStats);
618 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
619 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
620 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
621
622 // Signal threads to deallocate
623 SetEvent(threadsFinishEvent);
624
    // Wait for threads to finish
626 for(size_t i = 0; i < bkgThreads.size(); ++i)
627 bkgThreads[i].join();
628 bkgThreads.clear();
629
630 CloseHandle(threadsFinishEvent);
631
632 // Deallocate remaining common resources
633 while(!commonAllocations.empty())
634 {
635 size_t indexToFree = 0;
636 switch(config.FreeOrder)
637 {
638 case FREE_ORDER::FORWARD:
639 indexToFree = 0;
640 break;
641 case FREE_ORDER::BACKWARD:
642 indexToFree = commonAllocations.size() - 1;
643 break;
644 case FREE_ORDER::RANDOM:
645 indexToFree = mainRand.Generate() % commonAllocations.size();
646 break;
647 }
648
649 {
650 DeallocationTimeRegisterObj timeRegisterObj{outResult};
651 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
652 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
653 else
654 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
655 }
656 commonAllocations.erase(commonAllocations.begin() + indexToFree);
657 }
658
659 if(allocationCount)
660 {
661 outResult.AllocationTimeAvg /= allocationCount;
662 outResult.DeallocationTimeAvg /= allocationCount;
663 }
664
665 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
666
667 return res;
668}
669
Adam Sawicki51fa9662018-10-03 13:44:29 +0200670void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100671{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100672 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100673 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200674 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100675 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200676 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100677}
678
679struct AllocInfo
680{
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200681 VmaAllocation m_Allocation = VK_NULL_HANDLE;
682 VkBuffer m_Buffer = VK_NULL_HANDLE;
683 VkImage m_Image = VK_NULL_HANDLE;
684 uint32_t m_StartValue = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100685 union
686 {
687 VkBufferCreateInfo m_BufferInfo;
688 VkImageCreateInfo m_ImageInfo;
689 };
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200690
691 void CreateBuffer(
692 const VkBufferCreateInfo& bufCreateInfo,
693 const VmaAllocationCreateInfo& allocCreateInfo);
694 void Destroy();
Adam Sawickib8333fb2018-03-13 16:15:53 +0100695};
696
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200697void AllocInfo::CreateBuffer(
698 const VkBufferCreateInfo& bufCreateInfo,
699 const VmaAllocationCreateInfo& allocCreateInfo)
700{
701 m_BufferInfo = bufCreateInfo;
702 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
703 TEST(res == VK_SUCCESS);
704}
705
706void AllocInfo::Destroy()
707{
708 if(m_Image)
709 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200710 vkDestroyImage(g_hDevice, m_Image, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200711 }
712 if(m_Buffer)
713 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200714 vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200715 }
716 if(m_Allocation)
717 {
718 vmaFreeMemory(g_hAllocator, m_Allocation);
719 }
720}
721
Adam Sawickif2975342018-10-16 13:49:02 +0200722class StagingBufferCollection
723{
724public:
725 StagingBufferCollection() { }
726 ~StagingBufferCollection();
727 // Returns false if maximum total size of buffers would be exceeded.
728 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
729 void ReleaseAllBuffers();
730
731private:
732 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
733 struct BufInfo
734 {
735 VmaAllocation Allocation = VK_NULL_HANDLE;
736 VkBuffer Buffer = VK_NULL_HANDLE;
737 VkDeviceSize Size = VK_WHOLE_SIZE;
738 void* MappedPtr = nullptr;
739 bool Used = false;
740 };
741 std::vector<BufInfo> m_Bufs;
742 // Including both used and unused.
743 VkDeviceSize m_TotalSize = 0;
744};
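// Typical usage pattern of StagingBufferCollection, as exercised by UploadGpuData() and
// ValidateGpuData() below (sketch only; `dataSize` and `srcData` are hypothetical):
//
//   StagingBufferCollection stagingBufs;
//   VkBuffer stagingBuf; void* mapped;
//   if(stagingBufs.AcquireBuffer(dataSize, stagingBuf, mapped))
//   {
//       memcpy(mapped, srcData, dataSize); // staging buffers are persistently mapped
//       // ... record and submit a copy from stagingBuf, wait for it to complete ...
//       stagingBufs.ReleaseAllBuffers();   // mark all buffers reusable for the next batch
//   }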
745
746StagingBufferCollection::~StagingBufferCollection()
747{
748 for(size_t i = m_Bufs.size(); i--; )
749 {
750 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
751 }
752}
753
754bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
755{
756 assert(size <= MAX_TOTAL_SIZE);
757
758 // Try to find existing unused buffer with best size.
759 size_t bestIndex = SIZE_MAX;
760 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
761 {
762 BufInfo& currBufInfo = m_Bufs[i];
763 if(!currBufInfo.Used && currBufInfo.Size >= size &&
764 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
765 {
766 bestIndex = i;
767 }
768 }
769
770 if(bestIndex != SIZE_MAX)
771 {
772 m_Bufs[bestIndex].Used = true;
773 outBuffer = m_Bufs[bestIndex].Buffer;
774 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
775 return true;
776 }
777
778 // Allocate new buffer with requested size.
779 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
780 {
781 BufInfo bufInfo;
782 bufInfo.Size = size;
783 bufInfo.Used = true;
784
785 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
786 bufCreateInfo.size = size;
787 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
788
789 VmaAllocationCreateInfo allocCreateInfo = {};
790 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
791 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
792
793 VmaAllocationInfo allocInfo;
794 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
795 bufInfo.MappedPtr = allocInfo.pMappedData;
796 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
797
798 outBuffer = bufInfo.Buffer;
799 outMappedPtr = bufInfo.MappedPtr;
800
801 m_Bufs.push_back(std::move(bufInfo));
802
803 m_TotalSize += size;
804
805 return true;
806 }
807
    // There may be unused but smaller buffers: free them and try again.
809 bool hasUnused = false;
810 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
811 {
812 if(!m_Bufs[i].Used)
813 {
814 hasUnused = true;
815 break;
816 }
817 }
818 if(hasUnused)
819 {
820 for(size_t i = m_Bufs.size(); i--; )
821 {
822 if(!m_Bufs[i].Used)
823 {
824 m_TotalSize -= m_Bufs[i].Size;
825 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
826 m_Bufs.erase(m_Bufs.begin() + i);
827 }
828 }
829
830 return AcquireBuffer(size, outBuffer, outMappedPtr);
831 }
832
833 return false;
834}
835
836void StagingBufferCollection::ReleaseAllBuffers()
837{
838 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
839 {
840 m_Bufs[i].Used = false;
841 }
842}
843
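// Fills each destination buffer with its sequential test pattern: the pattern is written
// into a mapped staging buffer and copied with vkCmdCopyBuffer in the single-time command
// buffer. When the staging budget is exhausted, the pending commands are submitted and the
// staging buffers are recycled before continuing.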
844static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
845{
846 StagingBufferCollection stagingBufs;
847
848 bool cmdBufferStarted = false;
849 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
850 {
851 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
852 if(currAllocInfo.m_Buffer)
853 {
854 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
855
856 VkBuffer stagingBuf = VK_NULL_HANDLE;
857 void* stagingBufMappedPtr = nullptr;
858 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
859 {
860 TEST(cmdBufferStarted);
861 EndSingleTimeCommands();
862 stagingBufs.ReleaseAllBuffers();
863 cmdBufferStarted = false;
864
865 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
866 TEST(ok);
867 }
868
869 // Fill staging buffer.
870 {
871 assert(size % sizeof(uint32_t) == 0);
872 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
873 uint32_t val = currAllocInfo.m_StartValue;
874 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
875 {
876 *stagingValPtr = val;
877 ++stagingValPtr;
878 ++val;
879 }
880 }
881
882 // Issue copy command from staging buffer to destination buffer.
883 if(!cmdBufferStarted)
884 {
885 cmdBufferStarted = true;
886 BeginSingleTimeCommands();
887 }
888
889 VkBufferCopy copy = {};
890 copy.srcOffset = 0;
891 copy.dstOffset = 0;
892 copy.size = size;
893 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
894 }
895 else
896 {
897 TEST(0 && "Images not currently supported.");
898 }
899 }
900
901 if(cmdBufferStarted)
902 {
903 EndSingleTimeCommands();
904 stagingBufs.ReleaseAllBuffers();
905 }
906}
907
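// Reverse of UploadGpuData(): copies each buffer back into staging memory, then after the
// copy commands complete compares the staging contents against the expected sequential
// pattern starting at m_StartValue.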
908static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
909{
910 StagingBufferCollection stagingBufs;
911
912 bool cmdBufferStarted = false;
913 size_t validateAllocIndexOffset = 0;
914 std::vector<void*> validateStagingBuffers;
915 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
916 {
917 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
918 if(currAllocInfo.m_Buffer)
919 {
920 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
921
922 VkBuffer stagingBuf = VK_NULL_HANDLE;
923 void* stagingBufMappedPtr = nullptr;
924 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
925 {
926 TEST(cmdBufferStarted);
927 EndSingleTimeCommands();
928 cmdBufferStarted = false;
929
930 for(size_t validateIndex = 0;
931 validateIndex < validateStagingBuffers.size();
932 ++validateIndex)
933 {
934 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
935 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
936 TEST(validateSize % sizeof(uint32_t) == 0);
937 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
938 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
939 bool valid = true;
940 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
941 {
942 if(*stagingValPtr != val)
943 {
944 valid = false;
945 break;
946 }
947 ++stagingValPtr;
948 ++val;
949 }
950 TEST(valid);
951 }
952
953 stagingBufs.ReleaseAllBuffers();
954
955 validateAllocIndexOffset = allocInfoIndex;
956 validateStagingBuffers.clear();
957
958 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
959 TEST(ok);
960 }
961
962 // Issue copy command from staging buffer to destination buffer.
963 if(!cmdBufferStarted)
964 {
965 cmdBufferStarted = true;
966 BeginSingleTimeCommands();
967 }
968
969 VkBufferCopy copy = {};
970 copy.srcOffset = 0;
971 copy.dstOffset = 0;
972 copy.size = size;
973 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
974
            // Save mapped pointer for later validation.
976 validateStagingBuffers.push_back(stagingBufMappedPtr);
977 }
978 else
979 {
980 TEST(0 && "Images not currently supported.");
981 }
982 }
983
984 if(cmdBufferStarted)
985 {
986 EndSingleTimeCommands();
987
988 for(size_t validateIndex = 0;
989 validateIndex < validateStagingBuffers.size();
990 ++validateIndex)
991 {
992 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
993 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
994 TEST(validateSize % sizeof(uint32_t) == 0);
995 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
996 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
997 bool valid = true;
998 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
999 {
1000 if(*stagingValPtr != val)
1001 {
1002 valid = false;
1003 break;
1004 }
1005 ++stagingValPtr;
1006 ++val;
1007 }
1008 TEST(valid);
1009 }
1010
1011 stagingBufs.ReleaseAllBuffers();
1012 }
1013}
1014
Adam Sawickib8333fb2018-03-13 16:15:53 +01001015static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1016{
1017 outMemReq = {};
1018 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1019 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1020}
1021
1022static void CreateBuffer(
1023 VmaPool pool,
1024 const VkBufferCreateInfo& bufCreateInfo,
1025 bool persistentlyMapped,
1026 AllocInfo& outAllocInfo)
1027{
1028 outAllocInfo = {};
1029 outAllocInfo.m_BufferInfo = bufCreateInfo;
1030
1031 VmaAllocationCreateInfo allocCreateInfo = {};
1032 allocCreateInfo.pool = pool;
1033 if(persistentlyMapped)
1034 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1035
1036 VmaAllocationInfo vmaAllocInfo = {};
1037 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1038
1039 // Setup StartValue and fill.
1040 {
1041 outAllocInfo.m_StartValue = (uint32_t)rand();
1042 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001043 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001044 if(!persistentlyMapped)
1045 {
1046 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1047 }
1048
1049 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001050 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001051 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1052 data[i] = value++;
1053
1054 if(!persistentlyMapped)
1055 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1056 }
1057}
1058
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001059static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001060{
1061 outAllocation.m_Allocation = nullptr;
1062 outAllocation.m_Buffer = nullptr;
1063 outAllocation.m_Image = nullptr;
1064 outAllocation.m_StartValue = (uint32_t)rand();
1065
1066 VmaAllocationCreateInfo vmaMemReq;
1067 GetMemReq(vmaMemReq);
1068
1069 VmaAllocationInfo allocInfo;
1070
1071 const bool isBuffer = true;//(rand() & 0x1) != 0;
1072 const bool isLarge = (rand() % 16) == 0;
1073 if(isBuffer)
1074 {
1075 const uint32_t bufferSize = isLarge ?
1076 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1077 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1078
1079 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1080 bufferInfo.size = bufferSize;
1081 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1082
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001083 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001084 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001085 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001086 }
1087 else
1088 {
1089 const uint32_t imageSizeX = isLarge ?
1090 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1091 rand() % 1024 + 1; // 1 ... 1024
1092 const uint32_t imageSizeY = isLarge ?
1093 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1094 rand() % 1024 + 1; // 1 ... 1024
1095
1096 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1097 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1098 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1099 imageInfo.extent.width = imageSizeX;
1100 imageInfo.extent.height = imageSizeY;
1101 imageInfo.extent.depth = 1;
1102 imageInfo.mipLevels = 1;
1103 imageInfo.arrayLayers = 1;
1104 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1105 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1106 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1107 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1108
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001109 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001110 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001111 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001112 }
1113
1114 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1115 if(allocInfo.pMappedData == nullptr)
1116 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001117 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001118 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001119 }
1120
1121 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001122 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001123 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1124 data[i] = value++;
1125
1126 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001127 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001128}
1129
1130static void DestroyAllocation(const AllocInfo& allocation)
1131{
1132 if(allocation.m_Buffer)
1133 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1134 else
1135 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1136}
1137
1138static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1139{
1140 for(size_t i = allocations.size(); i--; )
1141 DestroyAllocation(allocations[i]);
1142 allocations.clear();
1143}
1144
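// Checks that the allocation still contains the sequential pattern written at creation
// time (m_StartValue, m_StartValue + 1, ...), temporarily mapping the memory if it is not
// persistently mapped.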
1145static void ValidateAllocationData(const AllocInfo& allocation)
1146{
1147 VmaAllocationInfo allocInfo;
1148 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1149
1150 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1151 if(allocInfo.pMappedData == nullptr)
1152 {
1153 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001154 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001155 }
1156
1157 uint32_t value = allocation.m_StartValue;
1158 bool ok = true;
1159 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001160 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001161 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1162 {
1163 if(data[i] != value++)
1164 {
1165 ok = false;
1166 break;
1167 }
1168 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001169 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001170
1171 if(allocInfo.pMappedData == nullptr)
1172 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1173}
1174
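// After defragmentation moves an allocation, the old VkBuffer/VkImage still refers to the
// old memory, so the resource is destroyed, recreated from the saved create info, and
// bound again to the allocation's (possibly moved) memory.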
1175static void RecreateAllocationResource(AllocInfo& allocation)
1176{
1177 VmaAllocationInfo allocInfo;
1178 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1179
1180 if(allocation.m_Buffer)
1181 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001182 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001183
Adam Sawicki1f84f622019-07-02 13:40:01 +02001184 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001185 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001186
1187 // Just to silence validation layer warnings.
1188 VkMemoryRequirements vkMemReq;
1189 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001190 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001191
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001192 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001193 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001194 }
1195 else
1196 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001197 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001198
Adam Sawicki1f84f622019-07-02 13:40:01 +02001199 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001200 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001201
1202 // Just to silence validation layer warnings.
1203 VkMemoryRequirements vkMemReq;
1204 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1205
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001206 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001207 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001208 }
1209}
1210
1211static void Defragment(AllocInfo* allocs, size_t allocCount,
1212 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1213 VmaDefragmentationStats* defragmentationStats = nullptr)
1214{
1215 std::vector<VmaAllocation> vmaAllocs(allocCount);
1216 for(size_t i = 0; i < allocCount; ++i)
1217 vmaAllocs[i] = allocs[i].m_Allocation;
1218
1219 std::vector<VkBool32> allocChanged(allocCount);
1220
1221 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1222 defragmentationInfo, defragmentationStats) );
1223
1224 for(size_t i = 0; i < allocCount; ++i)
1225 {
1226 if(allocChanged[i])
1227 {
1228 RecreateAllocationResource(allocs[i]);
1229 }
1230 }
1231}
1232
1233static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1234{
1235 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1236 ValidateAllocationData(allocInfo);
1237 });
1238}
1239
1240void TestDefragmentationSimple()
1241{
1242 wprintf(L"Test defragmentation simple\n");
1243
1244 RandomNumberGenerator rand(667);
1245
1246 const VkDeviceSize BUF_SIZE = 0x10000;
1247 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1248
1249 const VkDeviceSize MIN_BUF_SIZE = 32;
1250 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1251 auto RandomBufSize = [&]() -> VkDeviceSize {
1252 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1253 };
1254
1255 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1256 bufCreateInfo.size = BUF_SIZE;
1257 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1258
1259 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1260 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1261
1262 uint32_t memTypeIndex = UINT32_MAX;
1263 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1264
1265 VmaPoolCreateInfo poolCreateInfo = {};
1266 poolCreateInfo.blockSize = BLOCK_SIZE;
1267 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1268
1269 VmaPool pool;
1270 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1271
Adam Sawickie1681912018-11-23 17:50:12 +01001272 // Defragmentation of empty pool.
1273 {
1274 VmaDefragmentationInfo2 defragInfo = {};
1275 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1276 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1277 defragInfo.poolCount = 1;
1278 defragInfo.pPools = &pool;
1279
1280 VmaDefragmentationStats defragStats = {};
1281 VmaDefragmentationContext defragCtx = nullptr;
1282 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1283 TEST(res >= VK_SUCCESS);
1284 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1285 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1286 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1287 }
1288
Adam Sawickib8333fb2018-03-13 16:15:53 +01001289 std::vector<AllocInfo> allocations;
1290
1291 // persistentlyMappedOption = 0 - not persistently mapped.
1292 // persistentlyMappedOption = 1 - persistently mapped.
1293 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1294 {
1295 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1296 const bool persistentlyMapped = persistentlyMappedOption != 0;
1297
1298 // # Test 1
1299 // Buffers of fixed size.
1300 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1301 // Expected result: at least 1 block freed.
1302 {
1303 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1304 {
1305 AllocInfo allocInfo;
1306 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1307 allocations.push_back(allocInfo);
1308 }
1309
1310 for(size_t i = 1; i < allocations.size(); ++i)
1311 {
1312 DestroyAllocation(allocations[i]);
1313 allocations.erase(allocations.begin() + i);
1314 }
1315
1316 VmaDefragmentationStats defragStats;
1317 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001318 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1319 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001320
1321 ValidateAllocationsData(allocations.data(), allocations.size());
1322
1323 DestroyAllAllocations(allocations);
1324 }
1325
1326 // # Test 2
1327 // Buffers of fixed size.
1328 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
        // Expected result: Each of 4 iterations makes some progress.
1330 {
1331 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1332 {
1333 AllocInfo allocInfo;
1334 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1335 allocations.push_back(allocInfo);
1336 }
1337
1338 for(size_t i = 1; i < allocations.size(); ++i)
1339 {
1340 DestroyAllocation(allocations[i]);
1341 allocations.erase(allocations.begin() + i);
1342 }
1343
1344 VmaDefragmentationInfo defragInfo = {};
1345 defragInfo.maxAllocationsToMove = 1;
1346 defragInfo.maxBytesToMove = BUF_SIZE;
1347
1348 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1349 {
1350 VmaDefragmentationStats defragStats;
1351 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001352 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001353 }
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359
1360 // # Test 3
1361 // Buffers of variable size.
1362 // Create a number of buffers. Remove some percent of them.
1363 // Defragment while having some percent of them unmovable.
1364 // Expected result: Just simple validation.
1365 {
1366 for(size_t i = 0; i < 100; ++i)
1367 {
1368 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1369 localBufCreateInfo.size = RandomBufSize();
1370
1371 AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo); // use the randomized size prepared above
1373 allocations.push_back(allocInfo);
1374 }
1375
1376 const uint32_t percentToDelete = 60;
1377 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1378 for(size_t i = 0; i < numberToDelete; ++i)
1379 {
1380 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1381 DestroyAllocation(allocations[indexToDelete]);
1382 allocations.erase(allocations.begin() + indexToDelete);
1383 }
1384
            // Non-movable allocations will be at the beginning of the allocations array.
1386 const uint32_t percentNonMovable = 20;
1387 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1388 for(size_t i = 0; i < numberNonMovable; ++i)
1389 {
1390 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1391 if(indexNonMovable != i)
1392 std::swap(allocations[i], allocations[indexNonMovable]);
1393 }
1394
1395 VmaDefragmentationStats defragStats;
1396 Defragment(
1397 allocations.data() + numberNonMovable,
1398 allocations.size() - numberNonMovable,
1399 nullptr, &defragStats);
1400
1401 ValidateAllocationsData(allocations.data(), allocations.size());
1402
1403 DestroyAllAllocations(allocations);
1404 }
1405 }
1406
Adam Sawicki647cf242018-11-23 17:58:00 +01001407 /*
    Allocation that must be moved to an overlapping place using memmove().
1409 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1410 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001411 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001412 {
1413 AllocInfo allocInfo[2];
1414
1415 bufCreateInfo.size = BUF_SIZE;
1416 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1417 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1418 bufCreateInfo.size = biggerBufSize;
1419 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1420
1421 DestroyAllocation(allocInfo[0]);
1422
1423 VmaDefragmentationStats defragStats;
1424 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1425 // If this fails, it means we couldn't do memmove with overlapping regions.
1426 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1427
1428 ValidateAllocationsData(&allocInfo[1], 1);
1429 DestroyAllocation(allocInfo[1]);
1430 }
1431
Adam Sawickib8333fb2018-03-13 16:15:53 +01001432 vmaDestroyPool(g_hAllocator, pool);
1433}
1434
Adam Sawicki52076eb2018-11-22 16:14:50 +01001435void TestDefragmentationWholePool()
1436{
1437 wprintf(L"Test defragmentation whole pool\n");
1438
1439 RandomNumberGenerator rand(668);
1440
1441 const VkDeviceSize BUF_SIZE = 0x10000;
1442 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1443
1444 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1445 bufCreateInfo.size = BUF_SIZE;
1446 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1447
1448 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1449 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1450
1451 uint32_t memTypeIndex = UINT32_MAX;
1452 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1453
1454 VmaPoolCreateInfo poolCreateInfo = {};
1455 poolCreateInfo.blockSize = BLOCK_SIZE;
1456 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1457
1458 VmaDefragmentationStats defragStats[2];
1459 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1460 {
1461 VmaPool pool;
1462 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1463
1464 std::vector<AllocInfo> allocations;
1465
1466 // Buffers of fixed size.
1467 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1468 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1469 {
1470 AllocInfo allocInfo;
1471 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1472 allocations.push_back(allocInfo);
1473 }
1474
1475 for(size_t i = 1; i < allocations.size(); ++i)
1476 {
1477 DestroyAllocation(allocations[i]);
1478 allocations.erase(allocations.begin() + i);
1479 }
1480
1481 VmaDefragmentationInfo2 defragInfo = {};
1482 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1483 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1484 std::vector<VmaAllocation> allocationsToDefrag;
1485 if(caseIndex == 0)
1486 {
1487 defragInfo.poolCount = 1;
1488 defragInfo.pPools = &pool;
1489 }
1490 else
1491 {
1492 const size_t allocCount = allocations.size();
1493 allocationsToDefrag.resize(allocCount);
1494 std::transform(
1495 allocations.begin(), allocations.end(),
1496 allocationsToDefrag.begin(),
1497 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1498 defragInfo.allocationCount = (uint32_t)allocCount;
1499 defragInfo.pAllocations = allocationsToDefrag.data();
1500 }
1501
1502 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1503 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1504 TEST(res >= VK_SUCCESS);
1505 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1506
1507 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1508
1509 ValidateAllocationsData(allocations.data(), allocations.size());
1510
1511 DestroyAllAllocations(allocations);
1512
1513 vmaDestroyPool(g_hAllocator, pool);
1514 }
1515
1516 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1517 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1518 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1519 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1520}
1521
Adam Sawickib8333fb2018-03-13 16:15:53 +01001522void TestDefragmentationFull()
1523{
1524 std::vector<AllocInfo> allocations;
1525
1526 // Create initial allocations.
1527 for(size_t i = 0; i < 400; ++i)
1528 {
1529 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001530 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001531 allocations.push_back(allocation);
1532 }
1533
1534 // Delete random allocations
1535 const size_t allocationsToDeletePercent = 80;
1536 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1537 for(size_t i = 0; i < allocationsToDelete; ++i)
1538 {
1539 size_t index = (size_t)rand() % allocations.size();
1540 DestroyAllocation(allocations[index]);
1541 allocations.erase(allocations.begin() + index);
1542 }
1543
1544 for(size_t i = 0; i < allocations.size(); ++i)
1545 ValidateAllocationData(allocations[i]);
1546
Adam Sawicki0667e332018-08-24 17:26:44 +02001547 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001548
1549 {
1550 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1551 for(size_t i = 0; i < allocations.size(); ++i)
1552 vmaAllocations[i] = allocations[i].m_Allocation;
1553
1554 const size_t nonMovablePercent = 0;
1555 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1556 for(size_t i = 0; i < nonMovableCount; ++i)
1557 {
1558 size_t index = (size_t)rand() % vmaAllocations.size();
1559 vmaAllocations.erase(vmaAllocations.begin() + index);
1560 }
1561
1562 const uint32_t defragCount = 1;
1563 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1564 {
1565 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1566
1567 VmaDefragmentationInfo defragmentationInfo;
1568 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1569 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1570
1571 wprintf(L"Defragmentation #%u\n", defragIndex);
1572
1573 time_point begTime = std::chrono::high_resolution_clock::now();
1574
1575 VmaDefragmentationStats stats;
1576 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001577 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001578
1579 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1580
1581 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1582 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1583 wprintf(L"Time: %.2f s\n", defragmentDuration);
1584
1585 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1586 {
1587 if(allocationsChanged[i])
1588 {
1589 RecreateAllocationResource(allocations[i]);
1590 }
1591 }
1592
1593 for(size_t i = 0; i < allocations.size(); ++i)
1594 ValidateAllocationData(allocations[i]);
1595
Adam Sawicki0667e332018-08-24 17:26:44 +02001596 //wchar_t fileName[MAX_PATH];
1597 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1598 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001599 }
1600 }
1601
1602 // Destroy all remaining allocations.
1603 DestroyAllAllocations(allocations);
1604}
1605
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001606static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001607{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001608 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001609 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001610
1611 std::vector<AllocInfo> allocations;
1612
 1613 // Create enough allocations to surely fill 3 new 256 MB blocks.
Adam Sawickic6ede152018-11-16 17:04:14 +01001614 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1615 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001616 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001617 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1618 const size_t percentToLeave = 30;
1619 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001620 RandomNumberGenerator rand = { 234522 };
1621
1622 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001623
1624 VmaAllocationCreateInfo allocCreateInfo = {};
1625 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001626 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001627
1628 // Create all intended buffers.
1629 for(size_t i = 0; i < bufCount; ++i)
1630 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001631 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1632
1633 if(rand.Generate() % 100 < percentNonMovable)
1634 {
1635 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1636 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1637 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1638 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1639 }
1640 else
1641 {
1642 // Different usage just to see different color in output from VmaDumpVis.
1643 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1644 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1645 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1646 // And in JSON dump.
1647 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1648 }
1649
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001650 AllocInfo alloc;
1651 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1652 alloc.m_StartValue = rand.Generate();
1653 allocations.push_back(alloc);
1654 }
1655
1656 // Destroy some percentage of them.
1657 {
1658 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1659 for(size_t i = 0; i < buffersToDestroy; ++i)
1660 {
1661 const size_t index = rand.Generate() % allocations.size();
1662 allocations[index].Destroy();
1663 allocations.erase(allocations.begin() + index);
1664 }
1665 }
1666
1667 // Fill them with meaningful data.
1668 UploadGpuData(allocations.data(), allocations.size());
1669
Adam Sawickic6ede152018-11-16 17:04:14 +01001670 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001671 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001672 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001673
1674 // Defragment using GPU only.
1675 {
1676 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001677
Adam Sawickic6ede152018-11-16 17:04:14 +01001678 std::vector<VmaAllocation> allocationPtrs;
1679 std::vector<VkBool32> allocationChanged;
1680 std::vector<size_t> allocationOriginalIndex;
1681
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001682 for(size_t i = 0; i < allocCount; ++i)
1683 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001684 VmaAllocationInfo allocInfo = {};
1685 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1686 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1687 {
1688 allocationPtrs.push_back(allocations[i].m_Allocation);
1689 allocationChanged.push_back(VK_FALSE);
1690 allocationOriginalIndex.push_back(i);
1691 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001692 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001693
1694 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001695
1696 BeginSingleTimeCommands();
1697
1698 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001699 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001700 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001701 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001702 defragInfo.pAllocationsChanged = allocationChanged.data();
1703 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001704 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1705 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1706
1707 VmaDefragmentationStats stats = {};
1708 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1709 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1710 TEST(res >= VK_SUCCESS);
1711
1712 EndSingleTimeCommands();
1713
1714 vmaDefragmentationEnd(g_hAllocator, ctx);
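        // In sketch form, the GPU defragmentation flow used here is:
        //   vmaDefragmentationBegin()  - records buffer copy commands into defragInfo.commandBuffer
        //   submit + wait              - done by EndSingleTimeCommands() above
        //   vmaDefragmentationEnd()    - frees the old blocks and updates the VmaAllocation handles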
1715
Adam Sawickic6ede152018-11-16 17:04:14 +01001716 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001717 {
1718 if(allocationChanged[i])
1719 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001720 const size_t origAllocIndex = allocationOriginalIndex[i];
1721 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001722 }
1723 }
1724
Adam Sawicki4d844e22019-01-24 16:21:05 +01001725 // If corruption detection is enabled, GPU defragmentation may not work on
1726 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001727 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001728 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1729 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001730 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001731 }
1732
1733 ValidateGpuData(allocations.data(), allocations.size());
1734
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001735 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001736 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001737
1738 // Destroy all remaining buffers.
1739 for(size_t i = allocations.size(); i--; )
1740 {
1741 allocations[i].Destroy();
1742 }
Adam Sawicki05704002018-11-08 16:07:29 +01001743
1744 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001745}
1746
Adam Sawickib8333fb2018-03-13 16:15:53 +01001747static void TestUserData()
1748{
1749 VkResult res;
1750
1751 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1752 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1753 bufCreateInfo.size = 0x10000;
1754
1755 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1756 {
1757 // Opaque pointer
1758 {
1759
1760 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1761 void* pointerToSomething = &res;
1762
1763 VmaAllocationCreateInfo allocCreateInfo = {};
1764 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1765 allocCreateInfo.pUserData = numberAsPointer;
1766 if(testIndex == 1)
1767 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1768
1769 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1770 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001771 TEST(res == VK_SUCCESS);
 1772 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001775 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001776
1777 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1778 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001779 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1782 }
1783
1784 // String
1785 {
1786 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1787 const char* name2 = "2";
1788 const size_t name1Len = strlen(name1);
1789
1790 char* name1Buf = new char[name1Len + 1];
1791 strcpy_s(name1Buf, name1Len + 1, name1);
1792
1793 VmaAllocationCreateInfo allocCreateInfo = {};
1794 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1795 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
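            // VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT makes VMA treat pUserData as a
            // null-terminated string and store its own copy, so name1Buf can be deleted right
            // after the buffer is created (the asserts below rely on that).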
1796 allocCreateInfo.pUserData = name1Buf;
1797 if(testIndex == 1)
1798 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1799
1800 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001802 TEST(res == VK_SUCCESS);
1803 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1804 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001805
1806 delete[] name1Buf;
1807
1808 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001809 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001810
1811 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1812 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001813 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001814
1815 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1816 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001817 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001818
1819 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1820 }
1821 }
1822}
1823
Adam Sawicki370ab182018-11-08 16:31:00 +01001824static void TestInvalidAllocations()
1825{
1826 VkResult res;
1827
1828 VmaAllocationCreateInfo allocCreateInfo = {};
1829 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1830
1831 // Try to allocate 0 bytes.
1832 {
1833 VkMemoryRequirements memReq = {};
1834 memReq.size = 0; // !!!
1835 memReq.alignment = 4;
1836 memReq.memoryTypeBits = UINT32_MAX;
1837 VmaAllocation alloc = VK_NULL_HANDLE;
1838 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1839 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1840 }
1841
1842 // Try to create buffer with size = 0.
1843 {
1844 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1845 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1846 bufCreateInfo.size = 0; // !!!
1847 VkBuffer buf = VK_NULL_HANDLE;
1848 VmaAllocation alloc = VK_NULL_HANDLE;
1849 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1850 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1851 }
1852
1853 // Try to create image with one dimension = 0.
1854 {
 1855 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1856 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1857 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1858 imageCreateInfo.extent.width = 128;
1859 imageCreateInfo.extent.height = 0; // !!!
1860 imageCreateInfo.extent.depth = 1;
1861 imageCreateInfo.mipLevels = 1;
1862 imageCreateInfo.arrayLayers = 1;
1863 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1864 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1865 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1866 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1867 VkImage image = VK_NULL_HANDLE;
1868 VmaAllocation alloc = VK_NULL_HANDLE;
1869 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1870 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1871 }
1872}
1873
Adam Sawickib8333fb2018-03-13 16:15:53 +01001874static void TestMemoryRequirements()
1875{
1876 VkResult res;
1877 VkBuffer buf;
1878 VmaAllocation alloc;
1879 VmaAllocationInfo allocInfo;
1880
1881 const VkPhysicalDeviceMemoryProperties* memProps;
1882 vmaGetMemoryProperties(g_hAllocator, &memProps);
1883
1884 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1885 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1886 bufInfo.size = 128;
1887
1888 VmaAllocationCreateInfo allocCreateInfo = {};
1889
1890 // No requirements.
1891 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001892 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001893 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1894
1895 // Usage.
1896 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1897 allocCreateInfo.requiredFlags = 0;
1898 allocCreateInfo.preferredFlags = 0;
1899 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1900
1901 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001902 TEST(res == VK_SUCCESS);
1903 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001904 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1905
1906 // Required flags, preferred flags.
1907 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1908 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1909 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1910 allocCreateInfo.memoryTypeBits = 0;
1911
1912 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001913 TEST(res == VK_SUCCESS);
1914 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1915 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001916 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1917
1918 // memoryTypeBits.
1919 const uint32_t memType = allocInfo.memoryType;
1920 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1921 allocCreateInfo.requiredFlags = 0;
1922 allocCreateInfo.preferredFlags = 0;
1923 allocCreateInfo.memoryTypeBits = 1u << memType;
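    // memoryTypeBits is a bitmask where bit i allows memory type with index i.
    // Restricting it to a single bit forces the allocation into exactly that memory type,
    // which the assert below verifies.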
1924
1925 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001926 TEST(res == VK_SUCCESS);
1927 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001928 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1929
1930}
1931
1932static void TestBasics()
1933{
1934 VkResult res;
1935
1936 TestMemoryRequirements();
1937
1938 // Lost allocation
1939 {
1940 VmaAllocation alloc = VK_NULL_HANDLE;
1941 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001942 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001943
1944 VmaAllocationInfo allocInfo;
1945 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001946 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1947 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001948
1949 vmaFreeMemory(g_hAllocator, alloc);
1950 }
1951
1952 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1953 {
1954 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1955 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1956 bufCreateInfo.size = 128;
1957
1958 VmaAllocationCreateInfo allocCreateInfo = {};
1959 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1960 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1961
1962 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1963 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001964 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001965
1966 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1967
 1968 // Same with DEDICATED_MEMORY.
1969 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1970
1971 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001972 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001973
1974 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1975 }
1976
1977 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001978
1979 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980}
1981
1982void TestHeapSizeLimit()
1983{
1984 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1985 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1986
1987 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1988 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1989 {
1990 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1991 }
1992
1993 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1994 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1995 allocatorCreateInfo.device = g_hDevice;
1996 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
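    // pHeapSizeLimit points to an array indexed by memory heap. Each entry caps how much
    // memory this allocator may allocate from that heap; the rest of this test fills the
    // capped heap and expects the final allocation to fail.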
1997
1998 VmaAllocator hAllocator;
1999 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002000 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002001
2002 struct Item
2003 {
2004 VkBuffer hBuf;
2005 VmaAllocation hAlloc;
2006 };
2007 std::vector<Item> items;
2008
2009 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2010 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2011
 2012 // 1. Allocate two dedicated ("own") memory allocations, each half the size of BLOCK_SIZE.
2013 VmaAllocationInfo ownAllocInfo;
2014 {
2015 VmaAllocationCreateInfo allocCreateInfo = {};
2016 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2017 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2018
2019 bufCreateInfo.size = BLOCK_SIZE / 2;
2020
2021 for(size_t i = 0; i < 2; ++i)
2022 {
2023 Item item;
2024 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002025 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002026 items.push_back(item);
2027 }
2028 }
2029
 2030 // Create a pool to make sure further allocations come from this same memory type.
2031 VmaPoolCreateInfo poolCreateInfo = {};
2032 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2033 poolCreateInfo.blockSize = BLOCK_SIZE;
2034
2035 VmaPool hPool;
2036 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002037 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002038
2039 // 2. Allocate normal buffers from all the remaining memory.
2040 {
2041 VmaAllocationCreateInfo allocCreateInfo = {};
2042 allocCreateInfo.pool = hPool;
2043
2044 bufCreateInfo.size = BLOCK_SIZE / 2;
2045
2046 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
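        // With the constants above: 1 GB limit / 128 MB blocks = 8 blocks, minus the one
        // block-worth already used by the two dedicated allocations, times 2 half-block
        // buffers per block = 14 buffers to fill the remaining limited space.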
2047 for(size_t i = 0; i < bufCount; ++i)
2048 {
2049 Item item;
2050 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002051 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002052 items.push_back(item);
2053 }
2054 }
2055
2056 // 3. Allocation of one more (even small) buffer should fail.
2057 {
2058 VmaAllocationCreateInfo allocCreateInfo = {};
2059 allocCreateInfo.pool = hPool;
2060
2061 bufCreateInfo.size = 128;
2062
2063 VkBuffer hBuf;
2064 VmaAllocation hAlloc;
2065 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002066 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002067 }
2068
2069 // Destroy everything.
2070 for(size_t i = items.size(); i--; )
2071 {
2072 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2073 }
2074
2075 vmaDestroyPool(hAllocator, hPool);
2076
2077 vmaDestroyAllocator(hAllocator);
2078}
2079
Adam Sawicki212a4a62018-06-14 15:44:45 +02002080#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002081static void TestDebugMargin()
2082{
2083 if(VMA_DEBUG_MARGIN == 0)
2084 {
2085 return;
2086 }
2087
2088 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002089 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002090
2091 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002092 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002093
 2094 // Create a few buffers of different sizes.
2095 const size_t BUF_COUNT = 10;
2096 BufferInfo buffers[BUF_COUNT];
2097 VmaAllocationInfo allocInfo[BUF_COUNT];
 2098 for(size_t i = 0; i < BUF_COUNT; ++i)
2099 {
2100 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101 // Last one will be mapped.
2102 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002103
2104 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002106 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002107 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002108
2109 if(i == BUF_COUNT - 1)
2110 {
2111 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002112 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002113 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2114 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2115 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002116 }
2117
2118 // Check if their offsets preserve margin between them.
2119 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2120 {
2121 if(lhs.deviceMemory != rhs.deviceMemory)
2122 {
2123 return lhs.deviceMemory < rhs.deviceMemory;
2124 }
2125 return lhs.offset < rhs.offset;
2126 });
2127 for(size_t i = 1; i < BUF_COUNT; ++i)
2128 {
2129 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2130 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002131 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002132 }
2133 }
2134
Adam Sawicki212a4a62018-06-14 15:44:45 +02002135 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002136 TEST(res == VK_SUCCESS);
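    // Assuming VMA_DEBUG_DETECT_CORRUPTION is enabled along with VMA_DEBUG_MARGIN,
    // vmaCheckCorruption() validates the magic pattern written into the margins and
    // returns VK_SUCCESS only if no allocation was overrun.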
Adam Sawicki212a4a62018-06-14 15:44:45 +02002137
Adam Sawicki73b16652018-06-11 16:39:25 +02002138 // Destroy all buffers.
2139 for(size_t i = BUF_COUNT; i--; )
2140 {
2141 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2142 }
2143}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002144#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002145
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002146static void TestLinearAllocator()
2147{
2148 wprintf(L"Test linear allocator\n");
2149
2150 RandomNumberGenerator rand{645332};
2151
2152 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2153 sampleBufCreateInfo.size = 1024; // Whatever.
2154 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2155
2156 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2157 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2158
2159 VmaPoolCreateInfo poolCreateInfo = {};
2160 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002161 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002162
Adam Sawickiee082772018-06-20 17:45:49 +02002163 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002164 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2165 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
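    // A single block of fixed size with the linear algorithm makes the one-time-free,
    // stack, double stack and ring buffer scenarios below fully deterministic.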
2166
2167 VmaPool pool = nullptr;
2168 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002169 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002170
2171 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2172
2173 VmaAllocationCreateInfo allocCreateInfo = {};
2174 allocCreateInfo.pool = pool;
2175
2176 constexpr size_t maxBufCount = 100;
2177 std::vector<BufferInfo> bufInfo;
2178
2179 constexpr VkDeviceSize bufSizeMin = 16;
2180 constexpr VkDeviceSize bufSizeMax = 1024;
2181 VmaAllocationInfo allocInfo;
2182 VkDeviceSize prevOffset = 0;
2183
2184 // Test one-time free.
2185 for(size_t i = 0; i < 2; ++i)
2186 {
 2187 // Allocate a number of buffers of varying size that surely fit into this block.
2188 VkDeviceSize bufSumSize = 0;
2189 for(size_t i = 0; i < maxBufCount; ++i)
2190 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002191 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002192 BufferInfo newBufInfo;
2193 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2194 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002195 TEST(res == VK_SUCCESS);
2196 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002197 bufInfo.push_back(newBufInfo);
2198 prevOffset = allocInfo.offset;
2199 bufSumSize += bufCreateInfo.size;
2200 }
2201
2202 // Validate pool stats.
2203 VmaPoolStats stats;
2204 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002205 TEST(stats.size == poolCreateInfo.blockSize);
 2206 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2207 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002208
2209 // Destroy the buffers in random order.
2210 while(!bufInfo.empty())
2211 {
2212 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2213 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2214 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2215 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2216 }
2217 }
2218
2219 // Test stack.
2220 {
 2221 // Allocate a number of buffers of varying size that surely fit into this block.
2222 for(size_t i = 0; i < maxBufCount; ++i)
2223 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002224 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002225 BufferInfo newBufInfo;
2226 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2227 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002228 TEST(res == VK_SUCCESS);
2229 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002230 bufInfo.push_back(newBufInfo);
2231 prevOffset = allocInfo.offset;
2232 }
2233
 2234 // Destroy a few buffers from the top of the stack.
2235 for(size_t i = 0; i < maxBufCount / 5; ++i)
2236 {
2237 const BufferInfo& currBufInfo = bufInfo.back();
2238 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2239 bufInfo.pop_back();
2240 }
2241
2242 // Create some more
2243 for(size_t i = 0; i < maxBufCount / 5; ++i)
2244 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002245 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002246 BufferInfo newBufInfo;
2247 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2248 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002249 TEST(res == VK_SUCCESS);
2250 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002251 bufInfo.push_back(newBufInfo);
2252 prevOffset = allocInfo.offset;
2253 }
2254
2255 // Destroy the buffers in reverse order.
2256 while(!bufInfo.empty())
2257 {
2258 const BufferInfo& currBufInfo = bufInfo.back();
2259 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2260 bufInfo.pop_back();
2261 }
2262 }
2263
Adam Sawickiee082772018-06-20 17:45:49 +02002264 // Test ring buffer.
2265 {
 2266 // Allocate a number of buffers that surely fit into this block.
2267 bufCreateInfo.size = bufSizeMax;
2268 for(size_t i = 0; i < maxBufCount; ++i)
2269 {
2270 BufferInfo newBufInfo;
2271 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2272 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002273 TEST(res == VK_SUCCESS);
2274 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002275 bufInfo.push_back(newBufInfo);
2276 prevOffset = allocInfo.offset;
2277 }
2278
 2279 // Free and allocate new buffers enough times to make sure we wrap around at least once.
2280 const size_t buffersPerIter = maxBufCount / 10 - 1;
2281 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
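        // blockSize is 300 KB and each buffer is 1 KB, so cycling ~9 buffers per iteration
        // for this many iterations advances the ring by roughly two block lengths,
        // guaranteeing at least one wrap-around.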
2282 for(size_t iter = 0; iter < iterCount; ++iter)
2283 {
2284 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2285 {
2286 const BufferInfo& currBufInfo = bufInfo.front();
2287 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2288 bufInfo.erase(bufInfo.begin());
2289 }
2290 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2291 {
2292 BufferInfo newBufInfo;
2293 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2294 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002295 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002296 bufInfo.push_back(newBufInfo);
2297 }
2298 }
2299
2300 // Allocate buffers until we reach out-of-memory.
2301 uint32_t debugIndex = 0;
2302 while(res == VK_SUCCESS)
2303 {
2304 BufferInfo newBufInfo;
2305 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2306 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2307 if(res == VK_SUCCESS)
2308 {
2309 bufInfo.push_back(newBufInfo);
2310 }
2311 else
2312 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002313 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002314 }
2315 ++debugIndex;
2316 }
2317
2318 // Destroy the buffers in random order.
2319 while(!bufInfo.empty())
2320 {
2321 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2322 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2323 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2324 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2325 }
2326 }
2327
Adam Sawicki680b2252018-08-22 14:47:32 +02002328 // Test double stack.
2329 {
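        // With VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT the allocation is placed at the end
        // of the block and grows downward, while regular allocations grow upward from the
        // beginning; the offset checks below verify both directions.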
 2330 // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2331 VkDeviceSize prevOffsetLower = 0;
2332 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2333 for(size_t i = 0; i < maxBufCount; ++i)
2334 {
2335 const bool upperAddress = (i % 2) != 0;
2336 if(upperAddress)
2337 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2338 else
2339 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002340 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 BufferInfo newBufInfo;
2342 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2343 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002344 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002345 if(upperAddress)
2346 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002347 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002348 prevOffsetUpper = allocInfo.offset;
2349 }
2350 else
2351 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002353 prevOffsetLower = allocInfo.offset;
2354 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002355 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002356 bufInfo.push_back(newBufInfo);
2357 }
2358
 2359 // Destroy a few buffers from the top of the stack.
2360 for(size_t i = 0; i < maxBufCount / 5; ++i)
2361 {
2362 const BufferInfo& currBufInfo = bufInfo.back();
2363 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2364 bufInfo.pop_back();
2365 }
2366
2367 // Create some more
2368 for(size_t i = 0; i < maxBufCount / 5; ++i)
2369 {
2370 const bool upperAddress = (i % 2) != 0;
2371 if(upperAddress)
2372 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2373 else
2374 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002375 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002376 BufferInfo newBufInfo;
2377 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2378 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002379 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002380 bufInfo.push_back(newBufInfo);
2381 }
2382
2383 // Destroy the buffers in reverse order.
2384 while(!bufInfo.empty())
2385 {
2386 const BufferInfo& currBufInfo = bufInfo.back();
2387 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2388 bufInfo.pop_back();
2389 }
2390
2391 // Create buffers on both sides until we reach out of memory.
2392 prevOffsetLower = 0;
2393 prevOffsetUpper = poolCreateInfo.blockSize;
2394 res = VK_SUCCESS;
2395 for(size_t i = 0; res == VK_SUCCESS; ++i)
2396 {
2397 const bool upperAddress = (i % 2) != 0;
2398 if(upperAddress)
2399 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2400 else
2401 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002402 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002403 BufferInfo newBufInfo;
2404 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2405 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2406 if(res == VK_SUCCESS)
2407 {
2408 if(upperAddress)
2409 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002410 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002411 prevOffsetUpper = allocInfo.offset;
2412 }
2413 else
2414 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002415 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002416 prevOffsetLower = allocInfo.offset;
2417 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002418 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002419 bufInfo.push_back(newBufInfo);
2420 }
2421 }
2422
2423 // Destroy the buffers in random order.
2424 while(!bufInfo.empty())
2425 {
2426 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2427 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2428 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2429 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2430 }
2431
2432 // Create buffers on upper side only, constant size, until we reach out of memory.
2433 prevOffsetUpper = poolCreateInfo.blockSize;
2434 res = VK_SUCCESS;
2435 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2436 bufCreateInfo.size = bufSizeMax;
2437 for(size_t i = 0; res == VK_SUCCESS; ++i)
2438 {
2439 BufferInfo newBufInfo;
2440 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2441 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2442 if(res == VK_SUCCESS)
2443 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002444 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002445 prevOffsetUpper = allocInfo.offset;
2446 bufInfo.push_back(newBufInfo);
2447 }
2448 }
2449
2450 // Destroy the buffers in reverse order.
2451 while(!bufInfo.empty())
2452 {
2453 const BufferInfo& currBufInfo = bufInfo.back();
2454 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2455 bufInfo.pop_back();
2456 }
2457 }
2458
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002459 // Test ring buffer with lost allocations.
2460 {
 2461 // Allocate buffers until the pool is full.
2462 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2463 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
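        // An allocation made with CAN_BECOME_LOST may be taken over by a later allocation
        // that uses CAN_MAKE_OTHER_LOST, as long as it was not used in the current frame
        // reported via vmaSetCurrentFrameIndex(). A lost allocation then reports
        // deviceMemory == VK_NULL_HANDLE, which this test checks further down.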
2464 res = VK_SUCCESS;
2465 for(size_t i = 0; res == VK_SUCCESS; ++i)
2466 {
2467 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2468
Adam Sawickifd366b62019-01-24 15:26:43 +01002469 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002470
2471 BufferInfo newBufInfo;
2472 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2473 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2474 if(res == VK_SUCCESS)
2475 bufInfo.push_back(newBufInfo);
2476 }
2477
2478 // Free first half of it.
2479 {
2480 const size_t buffersToDelete = bufInfo.size() / 2;
2481 for(size_t i = 0; i < buffersToDelete; ++i)
2482 {
2483 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2484 }
2485 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2486 }
2487
 2488 // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002489 // This way we make sure the ring buffer wraps around, with its front now in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002490 res = VK_SUCCESS;
2491 for(size_t i = 0; res == VK_SUCCESS; ++i)
2492 {
2493 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2494
Adam Sawickifd366b62019-01-24 15:26:43 +01002495 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002496
2497 BufferInfo newBufInfo;
2498 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2499 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2500 if(res == VK_SUCCESS)
2501 bufInfo.push_back(newBufInfo);
2502 }
2503
2504 VkDeviceSize firstNewOffset;
2505 {
2506 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2507
2508 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2509 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2510 bufCreateInfo.size = bufSizeMax;
2511
2512 BufferInfo newBufInfo;
2513 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2514 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002515 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002516 bufInfo.push_back(newBufInfo);
2517 firstNewOffset = allocInfo.offset;
2518
2519 // Make sure at least one buffer from the beginning became lost.
2520 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002521 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002522 }
2523
Adam Sawickifd366b62019-01-24 15:26:43 +01002524#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002525 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2526 size_t newCount = 1;
2527 for(;;)
2528 {
2529 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2530
Adam Sawickifd366b62019-01-24 15:26:43 +01002531 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002532
2533 BufferInfo newBufInfo;
2534 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2535 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002536
Adam Sawickib8d34d52018-10-03 17:41:20 +02002537 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002538 bufInfo.push_back(newBufInfo);
2539 ++newCount;
2540 if(allocInfo.offset < firstNewOffset)
2541 break;
2542 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002543#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002544
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002545 // Delete buffers that are lost.
2546 for(size_t i = bufInfo.size(); i--; )
2547 {
2548 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2549 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2550 {
2551 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2552 bufInfo.erase(bufInfo.begin() + i);
2553 }
2554 }
2555
2556 // Test vmaMakePoolAllocationsLost
2557 {
2558 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2559
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002560 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002561 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002562 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002563
2564 size_t realLostAllocCount = 0;
2565 for(size_t i = 0; i < bufInfo.size(); ++i)
2566 {
2567 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2568 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2569 ++realLostAllocCount;
2570 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002571 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002572 }
2573
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002574 // Destroy all the buffers in forward order.
2575 for(size_t i = 0; i < bufInfo.size(); ++i)
2576 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2577 bufInfo.clear();
2578 }
2579
Adam Sawicki70a683e2018-08-24 15:36:32 +02002580 vmaDestroyPool(g_hAllocator, pool);
2581}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002582
Adam Sawicki70a683e2018-08-24 15:36:32 +02002583static void TestLinearAllocatorMultiBlock()
2584{
2585 wprintf(L"Test linear allocator multi block\n");
2586
2587 RandomNumberGenerator rand{345673};
2588
2589 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2590 sampleBufCreateInfo.size = 1024 * 1024;
2591 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2592
2593 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2594 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2595
2596 VmaPoolCreateInfo poolCreateInfo = {};
2597 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2598 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002600
2601 VmaPool pool = nullptr;
2602 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002603 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002604
2605 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2606
2607 VmaAllocationCreateInfo allocCreateInfo = {};
2608 allocCreateInfo.pool = pool;
2609
2610 std::vector<BufferInfo> bufInfo;
2611 VmaAllocationInfo allocInfo;
2612
2613 // Test one-time free.
2614 {
2615 // Allocate buffers until we move to a second block.
2616 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2617 for(uint32_t i = 0; ; ++i)
2618 {
2619 BufferInfo newBufInfo;
2620 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2621 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002622 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002623 bufInfo.push_back(newBufInfo);
2624 if(lastMem && allocInfo.deviceMemory != lastMem)
2625 {
2626 break;
2627 }
2628 lastMem = allocInfo.deviceMemory;
2629 }
2630
Adam Sawickib8d34d52018-10-03 17:41:20 +02002631 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002632
2633 // Make sure that pool has now two blocks.
2634 VmaPoolStats poolStats = {};
2635 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002636 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002637
2638 // Destroy all the buffers in random order.
2639 while(!bufInfo.empty())
2640 {
2641 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2642 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2643 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2644 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2645 }
2646
2647 // Make sure that pool has now at most one block.
2648 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002649 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002650 }
2651
2652 // Test stack.
2653 {
2654 // Allocate buffers until we move to a second block.
2655 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2656 for(uint32_t i = 0; ; ++i)
2657 {
2658 BufferInfo newBufInfo;
2659 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2660 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002661 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002662 bufInfo.push_back(newBufInfo);
2663 if(lastMem && allocInfo.deviceMemory != lastMem)
2664 {
2665 break;
2666 }
2667 lastMem = allocInfo.deviceMemory;
2668 }
2669
Adam Sawickib8d34d52018-10-03 17:41:20 +02002670 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002671
 2672 // Add a few more buffers.
2673 for(uint32_t i = 0; i < 5; ++i)
2674 {
2675 BufferInfo newBufInfo;
2676 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2677 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002678 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002679 bufInfo.push_back(newBufInfo);
2680 }
2681
2682 // Make sure that pool has now two blocks.
2683 VmaPoolStats poolStats = {};
2684 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002685 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002686
2687 // Delete half of buffers, LIFO.
2688 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2689 {
2690 const BufferInfo& currBufInfo = bufInfo.back();
2691 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2692 bufInfo.pop_back();
2693 }
2694
2695 // Add one more buffer.
2696 BufferInfo newBufInfo;
2697 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2698 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002699 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002700 bufInfo.push_back(newBufInfo);
2701
2702 // Make sure that pool has now one block.
2703 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002704 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002705
2706 // Delete all the remaining buffers, LIFO.
2707 while(!bufInfo.empty())
2708 {
2709 const BufferInfo& currBufInfo = bufInfo.back();
2710 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2711 bufInfo.pop_back();
2712 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002713 }
2714
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002715 vmaDestroyPool(g_hAllocator, pool);
2716}
2717
Adam Sawickifd11d752018-08-22 15:02:10 +02002718static void ManuallyTestLinearAllocator()
2719{
2720 VmaStats origStats;
2721 vmaCalculateStats(g_hAllocator, &origStats);
2722
2723 wprintf(L"Manually test linear allocator\n");
2724
2725 RandomNumberGenerator rand{645332};
2726
2727 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2728 sampleBufCreateInfo.size = 1024; // Whatever.
2729 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2730
2731 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2732 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2733
2734 VmaPoolCreateInfo poolCreateInfo = {};
2735 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002737
2738 poolCreateInfo.blockSize = 10 * 1024;
2739 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2740 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2741
2742 VmaPool pool = nullptr;
2743 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002744 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002745
2746 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2747
2748 VmaAllocationCreateInfo allocCreateInfo = {};
2749 allocCreateInfo.pool = pool;
2750
2751 std::vector<BufferInfo> bufInfo;
2752 VmaAllocationInfo allocInfo;
2753 BufferInfo newBufInfo;
2754
2755 // Test double stack.
2756 {
2757 /*
2758 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2759 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2760
 2761 In total:
2762 1 block allocated
2763 10240 Vulkan bytes
2764 6 new allocations
2765 2256 bytes in allocations
2766 */
2767
2768 bufCreateInfo.size = 32;
2769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2770 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002771 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002772 bufInfo.push_back(newBufInfo);
2773
2774 bufCreateInfo.size = 1024;
2775 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2776 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002777 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002778 bufInfo.push_back(newBufInfo);
2779
2780 bufCreateInfo.size = 32;
2781 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2782 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002783 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002784 bufInfo.push_back(newBufInfo);
2785
2786 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2787
2788 bufCreateInfo.size = 128;
2789 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2790 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002791 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002792 bufInfo.push_back(newBufInfo);
2793
2794 bufCreateInfo.size = 1024;
2795 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2796 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002797 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002798 bufInfo.push_back(newBufInfo);
2799
2800 bufCreateInfo.size = 16;
2801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2802 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002803 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002804 bufInfo.push_back(newBufInfo);
2805
2806 VmaStats currStats;
2807 vmaCalculateStats(g_hAllocator, &currStats);
2808 VmaPoolStats poolStats;
2809 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2810
2811 char* statsStr = nullptr;
2812 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2813
2814 // PUT BREAKPOINT HERE TO CHECK.
2815 // Inspect: currStats versus origStats, poolStats, statsStr.
2816 int I = 0;
2817
2818 vmaFreeStatsString(g_hAllocator, statsStr);
2819
2820 // Destroy the buffers in reverse order.
2821 while(!bufInfo.empty())
2822 {
2823 const BufferInfo& currBufInfo = bufInfo.back();
2824 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2825 bufInfo.pop_back();
2826 }
2827 }
2828
2829 vmaDestroyPool(g_hAllocator, pool);
2830}
2831
Adam Sawicki80927152018-09-07 17:27:23 +02002832static void BenchmarkAlgorithmsCase(FILE* file,
2833 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002834 bool empty,
2835 VmaAllocationCreateFlags allocStrategy,
2836 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002837{
2838 RandomNumberGenerator rand{16223};
2839
2840 const VkDeviceSize bufSizeMin = 32;
2841 const VkDeviceSize bufSizeMax = 1024;
2842 const size_t maxBufCapacity = 10000;
2843 const uint32_t iterationCount = 10;
2844
2845 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2846 sampleBufCreateInfo.size = bufSizeMax;
2847 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2848
2849 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2850 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2851
2852 VmaPoolCreateInfo poolCreateInfo = {};
2853 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002854 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002855
2856 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002857 poolCreateInfo.flags |= algorithm;
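    // 'algorithm' selects the pool's allocation algorithm (0 = default, or the linear/buddy
    // bits), matching the name printed through AlgorithmToStr() in the results below.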
Adam Sawicki0a607132018-08-24 11:18:41 +02002858 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2859
2860 VmaPool pool = nullptr;
2861 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002862 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002863
2864 // Buffer created just to get memory requirements. Never bound to any memory.
2865 VkBuffer dummyBuffer = VK_NULL_HANDLE;
Adam Sawicki1f84f622019-07-02 13:40:01 +02002866 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002867 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002868
2869 VkMemoryRequirements memReq = {};
2870 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2871
Adam Sawicki1f84f622019-07-02 13:40:01 +02002872 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawicki0a607132018-08-24 11:18:41 +02002873
2874 VmaAllocationCreateInfo allocCreateInfo = {};
2875 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002876 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002877
2878 VmaAllocation alloc;
2879 std::vector<VmaAllocation> baseAllocations;
2880
2881 if(!empty)
2882 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002883 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002884 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002885 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002886 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002887 // This test intentionally allows sizes that are not aligned to 4 or 16 bytes.
2888 // Such sizes are theoretically allowed and have already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002889 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2890 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002891 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002892 baseAllocations.push_back(alloc);
2893 totalSize += memReq.size;
2894 }
2895
2896 // Delete half of them, chosen at random.
2897 size_t allocsToDelete = baseAllocations.size() / 2;
2898 for(size_t i = 0; i < allocsToDelete; ++i)
2899 {
2900 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2901 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2902 baseAllocations.erase(baseAllocations.begin() + index);
2903 }
2904 }
2905
2906 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002907 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002908 std::vector<VmaAllocation> testAllocations;
2909 testAllocations.reserve(allocCount);
2910 duration allocTotalDuration = duration::zero();
2911 duration freeTotalDuration = duration::zero();
2912 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2913 {
2914 // Allocations
2915 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2916 for(size_t i = 0; i < allocCount; ++i)
2917 {
2918 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2919 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002920 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002921 testAllocations.push_back(alloc);
2922 }
2923 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2924
2925 // Deallocations
2926 switch(freeOrder)
2927 {
2928 case FREE_ORDER::FORWARD:
2929 // Leave testAllocations unchanged.
2930 break;
2931 case FREE_ORDER::BACKWARD:
2932 std::reverse(testAllocations.begin(), testAllocations.end());
2933 break;
2934 case FREE_ORDER::RANDOM:
2935 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2936 break;
2937 default: assert(0);
2938 }
2939
2940 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2941 for(size_t i = 0; i < allocCount; ++i)
2942 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2943 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2944
2945 testAllocations.clear();
2946 }
2947
2948 // Delete baseAllocations
2949 while(!baseAllocations.empty())
2950 {
2951 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2952 baseAllocations.pop_back();
2953 }
2954
2955 vmaDestroyPool(g_hAllocator, pool);
2956
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002957 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2958 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2959
Adam Sawicki80927152018-09-07 17:27:23 +02002960 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2961 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002962 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002963 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002964 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002965 allocTotalSeconds,
2966 freeTotalSeconds);
2967
2968 if(file)
2969 {
2970 std::string currTime;
2971 CurrentTimeToStr(currTime);
2972
Adam Sawicki80927152018-09-07 17:27:23 +02002973 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002974 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002975 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002976 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002977 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002978 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2979 allocTotalSeconds,
2980 freeTotalSeconds);
2981 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002982}
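
// Small sketch of the timing pattern used by BenchmarkAlgorithmsCase above,
// factored out for clarity; it is not used by the benchmark itself. time_point,
// duration and ToFloatSeconds come from Common.h, std::function from <functional>.
static float MeasureSeconds_Sketch(const std::function<void()>& fn)
{
    const time_point beg = std::chrono::high_resolution_clock::now();
    fn();
    return ToFloatSeconds(std::chrono::high_resolution_clock::now() - beg);
}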
2983
Adam Sawicki80927152018-09-07 17:27:23 +02002984static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002985{
Adam Sawicki80927152018-09-07 17:27:23 +02002986 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002987
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002988 if(file)
2989 {
2990 fprintf(file,
2991 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002992 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002993 "Allocation time (s),Deallocation time (s)\n");
2994 }
2995
Adam Sawicki0a607132018-08-24 11:18:41 +02002996 uint32_t freeOrderCount = 1;
2997 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2998 freeOrderCount = 3;
2999 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3000 freeOrderCount = 2;
3001
3002 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003003 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003004
3005 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3006 {
3007 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3008 switch(freeOrderIndex)
3009 {
3010 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3011 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3012 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3013 default: assert(0);
3014 }
3015
3016 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3017 {
Adam Sawicki80927152018-09-07 17:27:23 +02003018 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003019 {
Adam Sawicki80927152018-09-07 17:27:23 +02003020 uint32_t algorithm = 0;
3021 switch(algorithmIndex)
3022 {
3023 case 0:
3024 break;
3025 case 1:
3026 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3027 break;
3028 case 2:
3029 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3030 break;
3031 default:
3032 assert(0);
3033 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003034
Adam Sawicki80927152018-09-07 17:27:23 +02003035 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003036 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3037 {
3038 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003039 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003040 {
3041 switch(allocStrategyIndex)
3042 {
3043 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3044 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3045 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3046 default: assert(0);
3047 }
3048 }
3049
Adam Sawicki80927152018-09-07 17:27:23 +02003050 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003051 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003052 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003053 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003054 strategy,
3055 freeOrder); // freeOrder
3056 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003057 }
3058 }
3059 }
3060}
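
// Illustrative sketch (with assumed sizes, not called by any test) of creating
// a pool that uses the linear algorithm outside of the benchmark - e.g. as a
// ring buffer for per-frame staging data. memoryTypeIndex would normally be
// found with vmaFindMemoryTypeIndexForBufferInfo, as BenchmarkAlgorithmsCase does.
static VmaPool CreateLinearPool_Sketch(uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memoryTypeIndex;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 16 * 1024 * 1024; // Assumed 16 MB - tune to the workload.
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1; // Linear pools use a single block.

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);
    return pool;
}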
3061
Adam Sawickib8333fb2018-03-13 16:15:53 +01003062static void TestPool_SameSize()
3063{
3064 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3065 const size_t BUF_COUNT = 100;
3066 VkResult res;
3067
3068 RandomNumberGenerator rand{123};
3069
3070 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3071 bufferInfo.size = BUF_SIZE;
3072 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3073
3074 uint32_t memoryTypeBits = UINT32_MAX;
3075 {
3076 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003077 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003078 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003079
3080 VkMemoryRequirements memReq;
3081 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3082 memoryTypeBits = memReq.memoryTypeBits;
3083
Adam Sawicki1f84f622019-07-02 13:40:01 +02003084 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003085 }
3086
3087 VmaAllocationCreateInfo poolAllocInfo = {};
3088 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3089 uint32_t memTypeIndex;
3090 res = vmaFindMemoryTypeIndex(
3091 g_hAllocator,
3092 memoryTypeBits,
3093 &poolAllocInfo,
3094 &memTypeIndex);
3095 TEST(res == VK_SUCCESS);

3096 VmaPoolCreateInfo poolCreateInfo = {};
3097 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3098 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3099 poolCreateInfo.minBlockCount = 1;
3100 poolCreateInfo.maxBlockCount = 4;
3101 poolCreateInfo.frameInUseCount = 0;
3102
3103 VmaPool pool;
3104 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003105 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003106
3107 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3108
3109 VmaAllocationCreateInfo allocInfo = {};
3110 allocInfo.pool = pool;
3111 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3112 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3113
3114 struct BufItem
3115 {
3116 VkBuffer Buf;
3117 VmaAllocation Alloc;
3118 };
3119 std::vector<BufItem> items;
3120
3121 // Fill entire pool.
3122 for(size_t i = 0; i < BUF_COUNT; ++i)
3123 {
3124 BufItem item;
3125 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003126 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003127 items.push_back(item);
3128 }
3129
3130 // Make sure that another allocation would fail.
3131 {
3132 BufItem item;
3133 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003134 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003135 }
3136
3137 // Validate that no buffer is lost. Also check that they are not mapped.
3138 for(size_t i = 0; i < items.size(); ++i)
3139 {
3140 VmaAllocationInfo allocInfo;
3141 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003142 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3143 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003144 }
3145
3146 // Free some percent of random items.
3147 {
3148 const size_t PERCENT_TO_FREE = 10;
3149 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3150 for(size_t i = 0; i < itemsToFree; ++i)
3151 {
3152 size_t index = (size_t)rand.Generate() % items.size();
3153 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3154 items.erase(items.begin() + index);
3155 }
3156 }
3157
3158 // Randomly allocate and free items.
3159 {
3160 const size_t OPERATION_COUNT = BUF_COUNT;
3161 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3162 {
3163 bool allocate = rand.Generate() % 2 != 0;
3164 if(allocate)
3165 {
3166 if(items.size() < BUF_COUNT)
3167 {
3168 BufItem item;
3169 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003170 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003171 items.push_back(item);
3172 }
3173 }
3174 else // Free
3175 {
3176 if(!items.empty())
3177 {
3178 size_t index = (size_t)rand.Generate() % items.size();
3179 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3180 items.erase(items.begin() + index);
3181 }
3182 }
3183 }
3184 }
3185
3186 // Allocate up to maximum.
3187 while(items.size() < BUF_COUNT)
3188 {
3189 BufItem item;
3190 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003191 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003192 items.push_back(item);
3193 }
3194
3195 // Validate that no buffer is lost.
3196 for(size_t i = 0; i < items.size(); ++i)
3197 {
3198 VmaAllocationInfo allocInfo;
3199 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003200 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003201 }
3202
3203 // Next frame.
3204 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3205
3206 // Allocate another BUF_COUNT buffers.
3207 for(size_t i = 0; i < BUF_COUNT; ++i)
3208 {
3209 BufItem item;
3210 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003211 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003212 items.push_back(item);
3213 }
3214
3215 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3216 for(size_t i = 0; i < BUF_COUNT; ++i)
3217 {
3218 VmaAllocationInfo allocInfo;
3219 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003220 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003221 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3222 }
3223 items.erase(items.begin(), items.begin() + BUF_COUNT);
3224
3225 // Validate that no buffer is lost.
3226 for(size_t i = 0; i < items.size(); ++i)
3227 {
3228 VmaAllocationInfo allocInfo;
3229 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003230 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003231 }
3232
3233 // Free one item.
3234 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3235 items.pop_back();
3236
3237 // Validate statistics.
3238 {
3239 VmaPoolStats poolStats = {};
3240 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003241 TEST(poolStats.allocationCount == items.size());
3242 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3243 TEST(poolStats.unusedRangeCount == 1);
3244 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3245 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003246 }
3247
3248 // Free all remaining items.
3249 for(size_t i = items.size(); i--; )
3250 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3251 items.clear();
3252
3253 // Allocate maximum items again.
3254 for(size_t i = 0; i < BUF_COUNT; ++i)
3255 {
3256 BufItem item;
3257 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003258 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003259 items.push_back(item);
3260 }
3261
3262 // Delete every other item.
3263 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3264 {
3265 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3266 items.erase(items.begin() + i);
3267 }
3268
3269 // Defragment!
3270 {
3271 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3272 for(size_t i = 0; i < items.size(); ++i)
3273 allocationsToDefragment[i] = items[i].Alloc;
3274
3275 VmaDefragmentationStats defragmentationStats;
3276 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003277 TEST(res == VK_SUCCESS);
3278 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003279 }
3280
3281 // Free all remaining items.
3282 for(size_t i = items.size(); i--; )
3283 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3284 items.clear();
3285
3286 ////////////////////////////////////////////////////////////////////////////////
3287 // Test for vmaMakePoolAllocationsLost
3288
3289 // Allocate 4 buffers on frame 10.
3290 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3291 for(size_t i = 0; i < 4; ++i)
3292 {
3293 BufItem item;
3294 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003295 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003296 items.push_back(item);
3297 }
3298
3299 // Touch first 2 of them on frame 11.
3300 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3301 for(size_t i = 0; i < 2; ++i)
3302 {
3303 VmaAllocationInfo allocInfo;
3304 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3305 }
3306
3307 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3308 size_t lostCount = 0xDEADC0DE;
3309 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003310 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003311
3312 // Make another call. Now 0 should be lost.
3313 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003314 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003315
3316 // Make another call, with null count. Should not crash.
3317 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3318
3319 // END: Free all remaining items.
3320 for(size_t i = items.size(); i--; )
3321 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3322
3323 items.clear();
3324
Adam Sawickid2924172018-06-11 12:48:46 +02003325 ////////////////////////////////////////////////////////////////////////////////
3326 // Test for allocation too large for pool
3327
3328 {
3329 VmaAllocationCreateInfo allocCreateInfo = {};
3330 allocCreateInfo.pool = pool;
3331
3332 VkMemoryRequirements memReq;
3333 memReq.memoryTypeBits = UINT32_MAX;
3334 memReq.alignment = 1;
3335 memReq.size = poolCreateInfo.blockSize + 4;
3336
3337 VmaAllocation alloc = nullptr;
3338 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003339 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003340 }
3341
Adam Sawickib8333fb2018-03-13 16:15:53 +01003342 vmaDestroyPool(g_hAllocator, pool);
3343}
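
// Illustrative sketch of the per-frame "lost allocation" pattern exercised by
// TestPool_SameSize above: advance the frame index, touch the allocation, and
// recreate it if it has been lost. Not called by any test; bufferInfo and
// allocCreateInfo are assumed to be set up as in that test (pool-based allocation
// with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT).
static void RecreateBufferIfLost_Sketch(
    const VkBufferCreateInfo& bufferInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    uint32_t frameIndex,
    VkBuffer& buf,
    VmaAllocation& alloc)
{
    vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);

    VmaAllocationInfo info = {};
    vmaGetAllocationInfo(g_hAllocator, alloc, &info); // Also marks the allocation as used in this frame.
    if(info.deviceMemory == VK_NULL_HANDLE) // The allocation has been lost.
    {
        vmaDestroyBuffer(g_hAllocator, buf, alloc);
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &buf, &alloc, nullptr);
        TEST(res == VK_SUCCESS);
    }
}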
3344
Adam Sawickie44c6262018-06-15 14:30:39 +02003345static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3346{
3347 const uint8_t* pBytes = (const uint8_t*)pMemory;
3348 for(size_t i = 0; i < size; ++i)
3349 {
3350 if(pBytes[i] != pattern)
3351 {
3352 return false;
3353 }
3354 }
3355 return true;
3356}
3357
3358static void TestAllocationsInitialization()
3359{
3360 VkResult res;
3361
3362 const size_t BUF_SIZE = 1024;
3363
3364 // Create pool.
3365
3366 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3367 bufInfo.size = BUF_SIZE;
3368 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3369
3370 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3371 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3372
3373 VmaPoolCreateInfo poolCreateInfo = {};
3374 poolCreateInfo.blockSize = BUF_SIZE * 10;
3375 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3376 poolCreateInfo.maxBlockCount = 1;
3377 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003378 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003379
3380 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3381 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003382 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003383
3384 // Create one persistently mapped buffer to keep memory of this block mapped,
3385 // so that pointer to mapped data will remain (more or less...) valid even
3386 // after destruction of other allocations.
3387
3388 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3389 VkBuffer firstBuf;
3390 VmaAllocation firstAlloc;
3391 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003392 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003393
3394 // Test buffers.
3395
3396 for(uint32_t i = 0; i < 2; ++i)
3397 {
3398 const bool persistentlyMapped = i == 0;
3399 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3400 VkBuffer buf;
3401 VmaAllocation alloc;
3402 VmaAllocationInfo allocInfo;
3403 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003404 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003405
3406 void* pMappedData;
3407 if(!persistentlyMapped)
3408 {
3409 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003410 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003411 }
3412 else
3413 {
3414 pMappedData = allocInfo.pMappedData;
3415 }
3416
3417 // Validate initialized content
3418 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003419 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003420
3421 if(!persistentlyMapped)
3422 {
3423 vmaUnmapMemory(g_hAllocator, alloc);
3424 }
3425
3426 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3427
3428 // Validate freed content
3429 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003430 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003431 }
3432
3433 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3434 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3435}
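
// Note on TestAllocationsInitialization above: the 0xDC / 0xEF patterns are only
// written when the library is compiled with allocation initialization enabled.
// A sketch of the intended configuration, done before including vk_mem_alloc.h
// (typically in VmaUsage.h / VmaUsage.cpp, not here):
//
//     #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
//     #include "vk_mem_alloc.h"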
3436
Adam Sawickib8333fb2018-03-13 16:15:53 +01003437static void TestPool_Benchmark(
3438 PoolTestResult& outResult,
3439 const PoolTestConfig& config)
3440{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003441 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003442
3443 RandomNumberGenerator mainRand{config.RandSeed};
3444
3445 uint32_t allocationSizeProbabilitySum = std::accumulate(
3446 config.AllocationSizes.begin(),
3447 config.AllocationSizes.end(),
3448 0u,
3449 [](uint32_t sum, const AllocationSize& allocSize) {
3450 return sum + allocSize.Probability;
3451 });
3452
3453 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3454 bufferInfo.size = 256; // Whatever.
3455 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3456
3457 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3458 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3459 imageInfo.extent.width = 256; // Whatever.
3460 imageInfo.extent.height = 256; // Whatever.
3461 imageInfo.extent.depth = 1;
3462 imageInfo.mipLevels = 1;
3463 imageInfo.arrayLayers = 1;
3464 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3465 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3466 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3467 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3468 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3469
3470 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3471 {
3472 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003473 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003474 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003475
3476 VkMemoryRequirements memReq;
3477 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3478 bufferMemoryTypeBits = memReq.memoryTypeBits;
3479
Adam Sawicki1f84f622019-07-02 13:40:01 +02003480 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003481 }
3482
3483 uint32_t imageMemoryTypeBits = UINT32_MAX;
3484 {
3485 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003486 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003487 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003488
3489 VkMemoryRequirements memReq;
3490 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3491 imageMemoryTypeBits = memReq.memoryTypeBits;
3492
Adam Sawicki1f84f622019-07-02 13:40:01 +02003493 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003494 }
3495
3496 uint32_t memoryTypeBits = 0;
3497 if(config.UsesBuffers() && config.UsesImages())
3498 {
3499 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3500 if(memoryTypeBits == 0)
3501 {
3502 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3503 return;
3504 }
3505 }
3506 else if(config.UsesBuffers())
3507 memoryTypeBits = bufferMemoryTypeBits;
3508 else if(config.UsesImages())
3509 memoryTypeBits = imageMemoryTypeBits;
3510 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003511 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003512
3513 VmaPoolCreateInfo poolCreateInfo = {};
3514 poolCreateInfo.memoryTypeIndex = 0;
3515 poolCreateInfo.minBlockCount = 1;
3516 poolCreateInfo.maxBlockCount = 1;
3517 poolCreateInfo.blockSize = config.PoolSize;
3518 poolCreateInfo.frameInUseCount = 1;
3519
3520 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3521 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3522 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3523
3524 VmaPool pool;
3525 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003526 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003527
3528 // Start time measurement - after creating pool and initializing data structures.
3529 time_point timeBeg = std::chrono::high_resolution_clock::now();
3530
3531 ////////////////////////////////////////////////////////////////////////////////
3532 // ThreadProc
3533 auto ThreadProc = [&](
3534 PoolTestThreadResult* outThreadResult,
3535 uint32_t randSeed,
3536 HANDLE frameStartEvent,
3537 HANDLE frameEndEvent) -> void
3538 {
3539 RandomNumberGenerator threadRand{randSeed};
3540
3541 outThreadResult->AllocationTimeMin = duration::max();
3542 outThreadResult->AllocationTimeSum = duration::zero();
3543 outThreadResult->AllocationTimeMax = duration::min();
3544 outThreadResult->DeallocationTimeMin = duration::max();
3545 outThreadResult->DeallocationTimeSum = duration::zero();
3546 outThreadResult->DeallocationTimeMax = duration::min();
3547 outThreadResult->AllocationCount = 0;
3548 outThreadResult->DeallocationCount = 0;
3549 outThreadResult->LostAllocationCount = 0;
3550 outThreadResult->LostAllocationTotalSize = 0;
3551 outThreadResult->FailedAllocationCount = 0;
3552 outThreadResult->FailedAllocationTotalSize = 0;
3553
3554 struct Item
3555 {
3556 VkDeviceSize BufferSize;
3557 VkExtent2D ImageSize;
3558 VkBuffer Buf;
3559 VkImage Image;
3560 VmaAllocation Alloc;
3561
3562 VkDeviceSize CalcSizeBytes() const
3563 {
3564 return BufferSize +
3565 ImageSize.width * ImageSize.height * 4;
3566 }
3567 };
3568 std::vector<Item> unusedItems, usedItems;
3569
3570 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3571
3572 // Create all items - all unused, not yet allocated.
3573 for(size_t i = 0; i < threadTotalItemCount; ++i)
3574 {
3575 Item item = {};
3576
3577 uint32_t allocSizeIndex = 0;
3578 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3579 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3580 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3581
3582 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3583 if(allocSize.BufferSizeMax > 0)
3584 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003585 TEST(allocSize.BufferSizeMin > 0);
3586 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003587 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3588 item.BufferSize = allocSize.BufferSizeMin;
3589 else
3590 {
3591 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3592 item.BufferSize = item.BufferSize / 16 * 16;
3593 }
3594 }
3595 else
3596 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003597 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003598 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3599 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3600 else
3601 {
3602 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3603 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3604 }
3605 }
3606
3607 unusedItems.push_back(item);
3608 }
3609
3610 auto Allocate = [&](Item& item) -> VkResult
3611 {
3612 VmaAllocationCreateInfo allocCreateInfo = {};
3613 allocCreateInfo.pool = pool;
3614 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3615 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3616
3617 if(item.BufferSize)
3618 {
3619 bufferInfo.size = item.BufferSize;
3620 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3621 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3622 }
3623 else
3624 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003625 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003626
3627 imageInfo.extent.width = item.ImageSize.width;
3628 imageInfo.extent.height = item.ImageSize.height;
3629 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3630 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3631 }
3632 };
3633
3634 ////////////////////////////////////////////////////////////////////////////////
3635 // Frames
3636 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3637 {
3638 WaitForSingleObject(frameStartEvent, INFINITE);
3639
3640 // Always make some percent of used bufs unused, to choose different used ones.
3641 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3642 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3643 {
3644 size_t index = threadRand.Generate() % usedItems.size();
3645 unusedItems.push_back(usedItems[index]);
3646 usedItems.erase(usedItems.begin() + index);
3647 }
3648
3649 // Determine which bufs we want to use in this frame.
3650 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3651 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003652 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003653 // Move some used to unused.
3654 while(usedBufCount < usedItems.size())
3655 {
3656 size_t index = threadRand.Generate() % usedItems.size();
3657 unusedItems.push_back(usedItems[index]);
3658 usedItems.erase(usedItems.begin() + index);
3659 }
3660 // Move some unused to used.
3661 while(usedBufCount > usedItems.size())
3662 {
3663 size_t index = threadRand.Generate() % unusedItems.size();
3664 usedItems.push_back(unusedItems[index]);
3665 unusedItems.erase(unusedItems.begin() + index);
3666 }
3667
3668 uint32_t touchExistingCount = 0;
3669 uint32_t touchLostCount = 0;
3670 uint32_t createSucceededCount = 0;
3671 uint32_t createFailedCount = 0;
3672
3673 // Touch all used bufs. If not created or lost, allocate.
3674 for(size_t i = 0; i < usedItems.size(); ++i)
3675 {
3676 Item& item = usedItems[i];
3677 // Not yet created.
3678 if(item.Alloc == VK_NULL_HANDLE)
3679 {
3680 res = Allocate(item);
3681 ++outThreadResult->AllocationCount;
3682 if(res != VK_SUCCESS)
3683 {
3684 item.Alloc = VK_NULL_HANDLE;
3685 item.Buf = VK_NULL_HANDLE;
3686 ++outThreadResult->FailedAllocationCount;
3687 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3688 ++createFailedCount;
3689 }
3690 else
3691 ++createSucceededCount;
3692 }
3693 else
3694 {
3695 // Touch.
3696 VmaAllocationInfo allocInfo;
3697 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3698 // Lost.
3699 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3700 {
3701 ++touchLostCount;
3702
3703 // Destroy.
3704 {
3705 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3706 if(item.Buf)
3707 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3708 else
3709 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3710 ++outThreadResult->DeallocationCount;
3711 }
3712 item.Alloc = VK_NULL_HANDLE;
3713 item.Buf = VK_NULL_HANDLE;
3714
3715 ++outThreadResult->LostAllocationCount;
3716 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3717
3718 // Recreate.
3719 res = Allocate(item);
3720 ++outThreadResult->AllocationCount;
3721 // Creation failed.
3722 if(res != VK_SUCCESS)
3723 {
3724 ++outThreadResult->FailedAllocationCount;
3725 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3726 ++createFailedCount;
3727 }
3728 else
3729 ++createSucceededCount;
3730 }
3731 else
3732 ++touchExistingCount;
3733 }
3734 }
3735
3736 /*
3737 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3738 randSeed, frameIndex,
3739 touchExistingCount, touchLostCount,
3740 createSucceededCount, createFailedCount);
3741 */
3742
3743 SetEvent(frameEndEvent);
3744 }
3745
3746 // Free all remaining items.
3747 for(size_t i = usedItems.size(); i--; )
3748 {
3749 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3750 if(usedItems[i].Buf)
3751 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3752 else
3753 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3754 ++outThreadResult->DeallocationCount;
3755 }
3756 for(size_t i = unusedItems.size(); i--; )
3757 {
3758 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3759 if(unusedItems[i].Buf)
3760 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3761 else
3762 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3763 ++outThreadResult->DeallocationCount;
3764 }
3765 };
3766
3767 // Launch threads.
3768 uint32_t threadRandSeed = mainRand.Generate();
3769 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3770 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3771 std::vector<std::thread> bkgThreads;
3772 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3773 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3774 {
3775 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3776 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3777 bkgThreads.emplace_back(std::bind(
3778 ThreadProc,
3779 &threadResults[threadIndex],
3780 threadRandSeed + threadIndex,
3781 frameStartEvents[threadIndex],
3782 frameEndEvents[threadIndex]));
3783 }
3784
3785 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003786 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003787 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3788 {
3789 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3790 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3791 SetEvent(frameStartEvents[threadIndex]);
3792 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3793 }
3794
3795 // Wait for threads to finish.
3796 for(size_t i = 0; i < bkgThreads.size(); ++i)
3797 {
3798 bkgThreads[i].join();
3799 CloseHandle(frameEndEvents[i]);
3800 CloseHandle(frameStartEvents[i]);
3801 }
3802 bkgThreads.clear();
3803
3804 // Finish time measurement - before destroying pool.
3805 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3806
3807 vmaDestroyPool(g_hAllocator, pool);
3808
3809 outResult.AllocationTimeMin = duration::max();
3810 outResult.AllocationTimeAvg = duration::zero();
3811 outResult.AllocationTimeMax = duration::min();
3812 outResult.DeallocationTimeMin = duration::max();
3813 outResult.DeallocationTimeAvg = duration::zero();
3814 outResult.DeallocationTimeMax = duration::min();
3815 outResult.LostAllocationCount = 0;
3816 outResult.LostAllocationTotalSize = 0;
3817 outResult.FailedAllocationCount = 0;
3818 outResult.FailedAllocationTotalSize = 0;
3819 size_t allocationCount = 0;
3820 size_t deallocationCount = 0;
3821 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3822 {
3823 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3824 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3825 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3826 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3827 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3828 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3829 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3830 allocationCount += threadResult.AllocationCount;
3831 deallocationCount += threadResult.DeallocationCount;
3832 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3833 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3834 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3835 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3836 }
3837 if(allocationCount)
3838 outResult.AllocationTimeAvg /= allocationCount;
3839 if(deallocationCount)
3840 outResult.DeallocationTimeAvg /= deallocationCount;
3841}
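
// Illustrative sketch of the frame handshake used by TestPool_Benchmark above,
// reduced to a single worker thread: the main thread signals the start event,
// the worker performs one frame of work and signals the end event. Names are
// made up for this example, error handling is omitted, and it is not called by
// any test.
static void FrameHandshake_Sketch(uint32_t frameCount)
{
    HANDLE frameStartEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
    HANDLE frameEndEvent = CreateEvent(NULL, FALSE, FALSE, NULL);

    std::thread worker([&]()
    {
        for(uint32_t frameIndex = 0; frameIndex < frameCount; ++frameIndex)
        {
            WaitForSingleObject(frameStartEvent, INFINITE);
            // Per-frame allocations and deallocations would go here.
            SetEvent(frameEndEvent);
        }
    });

    for(uint32_t frameIndex = 0; frameIndex < frameCount; ++frameIndex)
    {
        vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
        SetEvent(frameStartEvent);
        WaitForSingleObject(frameEndEvent, INFINITE);
    }

    worker.join();
    CloseHandle(frameEndEvent);
    CloseHandle(frameStartEvent);
}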
3842
3843static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3844{
3845 if(ptr1 < ptr2)
3846 return ptr1 + size1 > ptr2;
3847 else if(ptr2 < ptr1)
3848 return ptr2 + size2 > ptr1;
3849 else
3850 return true;
3851}
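
// Quick illustration of the overlap check above (not executed by any test):
// two back-to-back 16-byte ranges do not overlap, while shifted ranges do.
static void MemoryRegionsOverlap_Example()
{
    char block[64] = {};
    TEST(!MemoryRegionsOverlap(block, 16, block + 16, 16));
    TEST(MemoryRegionsOverlap(block, 32, block + 16, 32));
}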
3852
3853static void TestMapping()
3854{
3855 wprintf(L"Testing mapping...\n");
3856
3857 VkResult res;
3858 uint32_t memTypeIndex = UINT32_MAX;
3859
3860 enum TEST
3861 {
3862 TEST_NORMAL,
3863 TEST_POOL,
3864 TEST_DEDICATED,
3865 TEST_COUNT
3866 };
3867 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3868 {
3869 VmaPool pool = nullptr;
3870 if(testIndex == TEST_POOL)
3871 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003872 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003873 VmaPoolCreateInfo poolInfo = {};
3874 poolInfo.memoryTypeIndex = memTypeIndex;
3875 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003876 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003877 }
3878
3879 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3880 bufInfo.size = 0x10000;
3881 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3882
3883 VmaAllocationCreateInfo allocCreateInfo = {};
3884 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3885 allocCreateInfo.pool = pool;
3886 if(testIndex == TEST_DEDICATED)
3887 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3888
3889 VmaAllocationInfo allocInfo;
3890
3891 // Mapped manually
3892
3893 // Create 2 buffers. The array has room for a 3rd, persistently mapped buffer created later.
3894 BufferInfo bufferInfos[3];
3895 for(size_t i = 0; i < 2; ++i)
3896 {
3897 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3898 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003899 TEST(res == VK_SUCCESS);
3900 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003901 memTypeIndex = allocInfo.memoryType;
3902 }
3903
3904 // Map buffer 0.
3905 char* data00 = nullptr;
3906 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003907 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003908 data00[0xFFFF] = data00[0];
3909
3910 // Map buffer 0 second time.
3911 char* data01 = nullptr;
3912 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003913 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003914
3915 // Map buffer 1.
3916 char* data1 = nullptr;
3917 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003918 TEST(res == VK_SUCCESS && data1 != nullptr);
3919 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003920 data1[0xFFFF] = data1[0];
3921
3922 // Unmap buffer 0 two times.
3923 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3924 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3925 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003926 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003927
3928 // Unmap buffer 1.
3929 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3930 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003931 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003932
3933 // Create 3rd buffer - persistently mapped.
3934 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3935 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3936 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003937 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003938
3939 // Map buffer 2.
3940 char* data2 = nullptr;
3941 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003942 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003943 data2[0xFFFF] = data2[0];
3944
3945 // Unmap buffer 2.
3946 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3947 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003948 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003949
3950 // Destroy all buffers.
3951 for(size_t i = 3; i--; )
3952 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3953
3954 vmaDestroyPool(g_hAllocator, pool);
3955 }
3956}
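
// Illustrative sketch of writing through a persistently mapped allocation.
// TestMapping above covers the map/unmap reference counting; this shows the
// companion pattern for memory that is HOST_VISIBLE but not necessarily
// HOST_COHERENT, where a flush is needed after CPU writes. Not called by any
// test; parameter names are made up.
static void WriteThroughPersistentMapping_Sketch(VmaAllocation alloc, const void* srcData, size_t size)
{
    VmaAllocationInfo info = {};
    vmaGetAllocationInfo(g_hAllocator, alloc, &info);
    TEST(info.pMappedData != nullptr); // Requires VMA_ALLOCATION_CREATE_MAPPED_BIT.

    memcpy(info.pMappedData, srcData, size);

    VkMemoryPropertyFlags memFlags = 0;
    vmaGetMemoryTypeProperties(g_hAllocator, info.memoryType, &memFlags);
    if((memFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
        vmaFlushAllocation(g_hAllocator, alloc, 0, size);
}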
3957
Adam Sawickidaa6a552019-06-25 15:26:37 +02003958// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
3959static void TestDeviceLocalMapped()
3960{
3961 VkResult res;
3962
3963 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
3964 {
3965 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3966 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3967 bufCreateInfo.size = 4096;
3968
3969 VmaPool pool = VK_NULL_HANDLE;
3970 VmaAllocationCreateInfo allocCreateInfo = {};
3971 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3972 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3973 if(testIndex == 2)
3974 {
3975 VmaPoolCreateInfo poolCreateInfo = {};
3976 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3977 TEST(res == VK_SUCCESS);
3978 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
3979 TEST(res == VK_SUCCESS);
3980 allocCreateInfo.pool = pool;
3981 }
3982 else if(testIndex == 1)
3983 {
3984 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3985 }
3986
3987 VkBuffer buf = VK_NULL_HANDLE;
3988 VmaAllocation alloc = VK_NULL_HANDLE;
3989 VmaAllocationInfo allocInfo = {};
3990 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
3991 TEST(res == VK_SUCCESS && alloc);
3992
3993 VkMemoryPropertyFlags memTypeFlags = 0;
3994 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
3995 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
3996 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
3997
3998 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3999 vmaDestroyPool(g_hAllocator, pool);
4000 }
4001}
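
// Illustrative sketch of how a caller might use the behavior verified above:
// request DEVICE_LOCAL + MAPPED, then branch on whether the returned memory is
// actually host-visible (e.g. on integrated GPUs) or a staging upload is needed.
// Not called by any test; the staging path is only hinted at in a comment.
static bool CanWriteDirectly_Sketch(const VmaAllocationInfo& allocInfo)
{
    VkMemoryPropertyFlags memFlags = 0;
    vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memFlags);
    // If this returns false, copy through a HOST_VISIBLE staging buffer and vkCmdCopyBuffer instead.
    return (memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
        allocInfo.pMappedData != nullptr;
}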
4002
Adam Sawickib8333fb2018-03-13 16:15:53 +01004003static void TestMappingMultithreaded()
4004{
4005 wprintf(L"Testing mapping multithreaded...\n");
4006
4007 static const uint32_t threadCount = 16;
4008 static const uint32_t bufferCount = 1024;
4009 static const uint32_t threadBufferCount = bufferCount / threadCount;
4010
4011 VkResult res;
4012 volatile uint32_t memTypeIndex = UINT32_MAX;
4013
4014 enum TEST
4015 {
4016 TEST_NORMAL,
4017 TEST_POOL,
4018 TEST_DEDICATED,
4019 TEST_COUNT
4020 };
4021 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4022 {
4023 VmaPool pool = nullptr;
4024 if(testIndex == TEST_POOL)
4025 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004026 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004027 VmaPoolCreateInfo poolInfo = {};
4028 poolInfo.memoryTypeIndex = memTypeIndex;
4029 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004030 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004031 }
4032
4033 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4034 bufCreateInfo.size = 0x10000;
4035 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4036
4037 VmaAllocationCreateInfo allocCreateInfo = {};
4038 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4039 allocCreateInfo.pool = pool;
4040 if(testIndex == TEST_DEDICATED)
4041 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4042
4043 std::thread threads[threadCount];
4044 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4045 {
4046 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4047 // ======== THREAD FUNCTION ========
4048
4049 RandomNumberGenerator rand{threadIndex};
4050
4051 enum class MODE
4052 {
4053 // Don't map this buffer at all.
4054 DONT_MAP,
4055 // Map and quickly unmap.
4056 MAP_FOR_MOMENT,
4057 // Map and unmap before destruction.
4058 MAP_FOR_LONGER,
4059 // Map two times. Quickly unmap, second unmap before destruction.
4060 MAP_TWO_TIMES,
4061 // Create this buffer as persistently mapped.
4062 PERSISTENTLY_MAPPED,
4063 COUNT
4064 };
4065 std::vector<BufferInfo> bufInfos{threadBufferCount};
4066 std::vector<MODE> bufModes{threadBufferCount};
4067
4068 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4069 {
4070 BufferInfo& bufInfo = bufInfos[bufferIndex];
4071 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4072 bufModes[bufferIndex] = mode;
4073
4074 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4075 if(mode == MODE::PERSISTENTLY_MAPPED)
4076 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4077
4078 VmaAllocationInfo allocInfo;
4079 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4080 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004081 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004082
4083 if(memTypeIndex == UINT32_MAX)
4084 memTypeIndex = allocInfo.memoryType;
4085
4086 char* data = nullptr;
4087
4088 if(mode == MODE::PERSISTENTLY_MAPPED)
4089 {
4090 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004091 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004092 }
4093 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4094 mode == MODE::MAP_TWO_TIMES)
4095 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004096 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004097 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004098 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004099
4100 if(mode == MODE::MAP_TWO_TIMES)
4101 {
4102 char* data2 = nullptr;
4103 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004104 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004105 }
4106 }
4107 else if(mode == MODE::DONT_MAP)
4108 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004109 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004110 }
4111 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004112 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004113
4114 // Test that reading and writing the first and last bytes of mapped memory don't crash.
4115 if(data)
4116 data[0xFFFF] = data[0];
4117
4118 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4119 {
4120 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4121
4122 VmaAllocationInfo allocInfo;
4123 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4124 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004125 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004126 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004127 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004128 }
4129
4130 switch(rand.Generate() % 3)
4131 {
4132 case 0: Sleep(0); break; // Yield.
4133 case 1: Sleep(10); break; // 10 ms
4134 // default: No sleep.
4135 }
4136
4137 // Test that reading and writing the first and last bytes of mapped memory don't crash.
4138 if(data)
4139 data[0xFFFF] = data[0];
4140 }
4141
4142 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4143 {
4144 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4145 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4146 {
4147 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4148
4149 VmaAllocationInfo allocInfo;
4150 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004151 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004152 }
4153
4154 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4155 }
4156 });
4157 }
4158
4159 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4160 threads[threadIndex].join();
4161
4162 vmaDestroyPool(g_hAllocator, pool);
4163 }
4164}
4165
4166static void WriteMainTestResultHeader(FILE* file)
4167{
4168 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004169 "Code,Time,"
4170 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004171 "Total Time (us),"
4172 "Allocation Time Min (us),"
4173 "Allocation Time Avg (us),"
4174 "Allocation Time Max (us),"
4175 "Deallocation Time Min (us),"
4176 "Deallocation Time Avg (us),"
4177 "Deallocation Time Max (us),"
4178 "Total Memory Allocated (B),"
4179 "Free Range Size Avg (B),"
4180 "Free Range Size Max (B)\n");
4181}
4182
4183static void WriteMainTestResult(
4184 FILE* file,
4185 const char* codeDescription,
4186 const char* testDescription,
4187 const Config& config, const Result& result)
4188{
4189 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4190 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4191 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4192 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4193 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4194 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4195 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4196
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004197 std::string currTime;
4198 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004199
4200 fprintf(file,
4201 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004202 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4203 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004204 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004205 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004206 totalTimeSeconds * 1e6f,
4207 allocationTimeMinSeconds * 1e6f,
4208 allocationTimeAvgSeconds * 1e6f,
4209 allocationTimeMaxSeconds * 1e6f,
4210 deallocationTimeMinSeconds * 1e6f,
4211 deallocationTimeAvgSeconds * 1e6f,
4212 deallocationTimeMaxSeconds * 1e6f,
4213 result.TotalMemoryAllocated,
4214 result.FreeRangeSizeAvg,
4215 result.FreeRangeSizeMax);
4216}
4217
4218static void WritePoolTestResultHeader(FILE* file)
4219{
4220 fprintf(file,
4221 "Code,Test,Time,"
4222 "Config,"
4223 "Total Time (us),"
4224 "Allocation Time Min (us),"
4225 "Allocation Time Avg (us),"
4226 "Allocation Time Max (us),"
4227 "Deallocation Time Min (us),"
4228 "Deallocation Time Avg (us),"
4229 "Deallocation Time Max (us),"
4230 "Lost Allocation Count,"
4231 "Lost Allocation Total Size (B),"
4232 "Failed Allocation Count,"
4233 "Failed Allocation Total Size (B)\n");
4234}
4235
4236static void WritePoolTestResult(
4237 FILE* file,
4238 const char* codeDescription,
4239 const char* testDescription,
4240 const PoolTestConfig& config,
4241 const PoolTestResult& result)
4242{
4243 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4244 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4245 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4246 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4247 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4248 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4249 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4250
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004251 std::string currTime;
4252 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004253
4254 fprintf(file,
4255 "%s,%s,%s,"
4256 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4257 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4258 // General
4259 codeDescription,
4260 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004261 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004262 // Config
4263 config.ThreadCount,
4264 (unsigned long long)config.PoolSize,
4265 config.FrameCount,
4266 config.TotalItemCount,
4267 config.UsedItemCountMin,
4268 config.UsedItemCountMax,
4269 config.ItemsToMakeUnusedPercent,
4270 // Results
4271 totalTimeSeconds * 1e6f,
4272 allocationTimeMinSeconds * 1e6f,
4273 allocationTimeAvgSeconds * 1e6f,
4274 allocationTimeMaxSeconds * 1e6f,
4275 deallocationTimeMinSeconds * 1e6f,
4276 deallocationTimeAvgSeconds * 1e6f,
4277 deallocationTimeMaxSeconds * 1e6f,
4278 result.LostAllocationCount,
4279 result.LostAllocationTotalSize,
4280 result.FailedAllocationCount,
4281 result.FailedAllocationTotalSize);
4282}
4283
4284static void PerformCustomMainTest(FILE* file)
4285{
4286 Config config{};
4287 config.RandSeed = 65735476;
4288 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4289 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4290 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4291 config.FreeOrder = FREE_ORDER::FORWARD;
4292 config.ThreadCount = 16;
4293 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004294 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004295
4296 // Buffers
4297 //config.AllocationSizes.push_back({4, 16, 1024});
4298 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4299
4300 // Images
4301 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4302 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4303
4304 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4305 config.AdditionalOperationCount = 1024;
4306
4307 Result result{};
4308 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004309 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004310 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4311}
4312

4313static void PerformCustomPoolTest(FILE* file)
4314{
4315    PoolTestConfig config = {};
4316    config.PoolSize = 100 * 1024 * 1024; // Placeholder; recomputed below from the average resource size.
4317 config.RandSeed = 2345764;
4318 config.ThreadCount = 1;
4319 config.FrameCount = 200;
4320 config.ItemsToMakeUnusedPercent = 2;
4321
4322 AllocationSize allocSize = {};
4323 allocSize.BufferSizeMin = 1024;
4324 allocSize.BufferSizeMax = 1024 * 1024;
4325 allocSize.Probability = 1;
4326 config.AllocationSizes.push_back(allocSize);
4327
4328 allocSize.BufferSizeMin = 0;
4329 allocSize.BufferSizeMax = 0;
4330 allocSize.ImageSizeMin = 128;
4331 allocSize.ImageSizeMax = 1024;
4332 allocSize.Probability = 1;
4333 config.AllocationSizes.push_back(allocSize);
4334
4335 config.PoolSize = config.CalcAvgResourceSize() * 200;
4336 config.UsedItemCountMax = 160;
4337 config.TotalItemCount = config.UsedItemCountMax * 10;
4338 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4339
4340 g_MemoryAliasingWarningEnabled = false;
4341 PoolTestResult result = {};
4342 TestPool_Benchmark(result, config);
4343 g_MemoryAliasingWarningEnabled = true;
4344
4345 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4346}
4347
Adam Sawickib8333fb2018-03-13 16:15:53 +01004348static void PerformMainTests(FILE* file)
4349{
4350 uint32_t repeatCount = 1;
4351 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4352
4353 Config config{};
4354 config.RandSeed = 65735476;
4355 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4356 config.FreeOrder = FREE_ORDER::FORWARD;
4357
4358 size_t threadCountCount = 1;
4359 switch(ConfigType)
4360 {
4361 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4362 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4363 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4364 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4365 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4366 default: assert(0);
4367 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004368
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004369 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004370
Adam Sawickib8333fb2018-03-13 16:15:53 +01004371 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4372 {
4373 std::string desc1;
4374
4375 switch(threadCountIndex)
4376 {
4377 case 0:
4378 desc1 += "1_thread";
4379 config.ThreadCount = 1;
4380 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4381 break;
4382 case 1:
4383 desc1 += "16_threads+0%_common";
4384 config.ThreadCount = 16;
4385 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4386 break;
4387 case 2:
4388 desc1 += "16_threads+50%_common";
4389 config.ThreadCount = 16;
4390 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4391 break;
4392 case 3:
4393 desc1 += "16_threads+100%_common";
4394 config.ThreadCount = 16;
4395 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4396 break;
4397 case 4:
4398 desc1 += "2_threads+0%_common";
4399 config.ThreadCount = 2;
4400 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4401 break;
4402 case 5:
4403 desc1 += "2_threads+50%_common";
4404 config.ThreadCount = 2;
4405 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4406 break;
4407 case 6:
4408 desc1 += "2_threads+100%_common";
4409 config.ThreadCount = 2;
4410 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4411 break;
4412 default:
4413 assert(0);
4414 }
4415
4416 // 0 = buffers, 1 = images, 2 = buffers and images
4417 size_t buffersVsImagesCount = 2;
4418 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4419 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4420 {
4421 std::string desc2 = desc1;
4422 switch(buffersVsImagesIndex)
4423 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004424 case 0: desc2 += ",Buffers"; break;
4425 case 1: desc2 += ",Images"; break;
4426 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004427 default: assert(0);
4428 }
4429
4430 // 0 = small, 1 = large, 2 = small and large
4431 size_t smallVsLargeCount = 2;
4432 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4433 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4434 {
4435 std::string desc3 = desc2;
4436 switch(smallVsLargeIndex)
4437 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004438 case 0: desc3 += ",Small"; break;
4439 case 1: desc3 += ",Large"; break;
4440 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004441 default: assert(0);
4442 }
4443
4444 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4445 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4446 else
4447 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4448
4449 // 0 = varying sizes min...max, 1 = set of constant sizes
4450 size_t constantSizesCount = 1;
4451 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4452 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4453 {
4454 std::string desc4 = desc3;
4455 switch(constantSizesIndex)
4456 {
4457 case 0: desc4 += " Varying_sizes"; break;
4458 case 1: desc4 += " Constant_sizes"; break;
4459 default: assert(0);
4460 }
4461
4462 config.AllocationSizes.clear();
4463 // Buffers present
4464 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4465 {
4466 // Small
4467 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4468 {
4469 // Varying size
4470 if(constantSizesIndex == 0)
4471 config.AllocationSizes.push_back({4, 16, 1024});
4472 // Constant sizes
4473 else
4474 {
4475 config.AllocationSizes.push_back({1, 16, 16});
4476 config.AllocationSizes.push_back({1, 64, 64});
4477 config.AllocationSizes.push_back({1, 256, 256});
4478 config.AllocationSizes.push_back({1, 1024, 1024});
4479 }
4480 }
4481 // Large
4482 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4483 {
4484 // Varying size
4485 if(constantSizesIndex == 0)
4486 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4487 // Constant sizes
4488 else
4489 {
4490 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4491 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4492 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4493 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4494 }
4495 }
4496 }
4497 // Images present
4498 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4499 {
4500 // Small
4501 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4502 {
4503 // Varying size
4504 if(constantSizesIndex == 0)
4505 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4506 // Constant sizes
4507 else
4508 {
4509 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4510 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4511 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4512 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4513 }
4514 }
4515 // Large
4516 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4517 {
4518 // Varying size
4519 if(constantSizesIndex == 0)
4520 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4521 // Constant sizes
4522 else
4523 {
4524 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4525 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4526 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4527 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4528 }
4529 }
4530 }
4531
4532                // Initial allocation amount: 0 = 100% with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations.
4533 size_t beginBytesToAllocateCount = 1;
4534 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4535 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4536 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4537 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4538 {
4539 std::string desc5 = desc4;
4540
4541 switch(beginBytesToAllocateIndex)
4542 {
4543 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004544 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004545 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4546 config.AdditionalOperationCount = 0;
4547 break;
4548 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004549 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004550 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4551 config.AdditionalOperationCount = 1024;
4552 break;
4553 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004554 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004555 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4556 config.AdditionalOperationCount = 1024;
4557 break;
4558 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004559 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004560 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4561 config.AdditionalOperationCount = 1024;
4562 break;
4563 default:
4564 assert(0);
4565 }
4566
Adam Sawicki0667e332018-08-24 17:26:44 +02004567 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004568 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004569 std::string desc6 = desc5;
4570 switch(strategyIndex)
4571 {
4572 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004573 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004574 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4575 break;
4576 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004577 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004578 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4579 break;
4580 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004581 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004582 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4583 break;
4584 default:
4585 assert(0);
4586 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004587
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004588 desc6 += ',';
4589 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004590
4591 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004592
4593 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4594 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004595 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004596
4597 Result result{};
4598 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004599 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004600 if(file)
4601 {
4602 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4603 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004604 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004605 }
4606 }
4607 }
4608 }
4609 }
4610 }
4611}
4612
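// For reference (illustrative, not exhaustive): one test description generated by the nested
// loops above with the current ConfigType (CONFIG_TYPE_SMALL) is
// "16_threads+0%_common,Buffers,Small Varying_sizes,Allocate_50%+Operations,BestFit,FORWARD".
// Each such configuration runs MainTest() and writes one row to the CSV via WriteMainTestResult().
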
4613static void PerformPoolTests(FILE* file)
4614{
4615 const size_t AVG_RESOURCES_PER_POOL = 300;
4616
4617 uint32_t repeatCount = 1;
4618 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4619
4620 PoolTestConfig config{};
4621 config.RandSeed = 2346343;
4622 config.FrameCount = 200;
4623 config.ItemsToMakeUnusedPercent = 2;
4624
4625 size_t threadCountCount = 1;
4626 switch(ConfigType)
4627 {
4628 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4629 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4630 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4631 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4632 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4633 default: assert(0);
4634 }
4635 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4636 {
4637 std::string desc1;
4638
4639 switch(threadCountIndex)
4640 {
4641 case 0:
4642 desc1 += "1_thread";
4643 config.ThreadCount = 1;
4644 break;
4645 case 1:
4646 desc1 += "16_threads";
4647 config.ThreadCount = 16;
4648 break;
4649 case 2:
4650 desc1 += "2_threads";
4651 config.ThreadCount = 2;
4652 break;
4653 default:
4654 assert(0);
4655 }
4656
4657 // 0 = buffers, 1 = images, 2 = buffers and images
4658 size_t buffersVsImagesCount = 2;
4659 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4660 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4661 {
4662 std::string desc2 = desc1;
4663 switch(buffersVsImagesIndex)
4664 {
4665 case 0: desc2 += " Buffers"; break;
4666 case 1: desc2 += " Images"; break;
4667 case 2: desc2 += " Buffers+Images"; break;
4668 default: assert(0);
4669 }
4670
4671 // 0 = small, 1 = large, 2 = small and large
4672 size_t smallVsLargeCount = 2;
4673 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4674 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4675 {
4676 std::string desc3 = desc2;
4677 switch(smallVsLargeIndex)
4678 {
4679 case 0: desc3 += " Small"; break;
4680 case 1: desc3 += " Large"; break;
4681 case 2: desc3 += " Small+Large"; break;
4682 default: assert(0);
4683 }
4684
4685 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4686                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB (placeholder; recomputed below from the average resource size)
4687 else
4688 config.PoolSize = 4ull * 1024 * 1024;
4689
4690 // 0 = varying sizes min...max, 1 = set of constant sizes
4691 size_t constantSizesCount = 1;
4692 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4693 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4694 {
4695 std::string desc4 = desc3;
4696 switch(constantSizesIndex)
4697 {
4698 case 0: desc4 += " Varying_sizes"; break;
4699 case 1: desc4 += " Constant_sizes"; break;
4700 default: assert(0);
4701 }
4702
4703 config.AllocationSizes.clear();
4704 // Buffers present
4705 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4706 {
4707 // Small
4708 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4709 {
4710 // Varying size
4711 if(constantSizesIndex == 0)
4712 config.AllocationSizes.push_back({4, 16, 1024});
4713 // Constant sizes
4714 else
4715 {
4716 config.AllocationSizes.push_back({1, 16, 16});
4717 config.AllocationSizes.push_back({1, 64, 64});
4718 config.AllocationSizes.push_back({1, 256, 256});
4719 config.AllocationSizes.push_back({1, 1024, 1024});
4720 }
4721 }
4722 // Large
4723 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4724 {
4725 // Varying size
4726 if(constantSizesIndex == 0)
4727 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4728 // Constant sizes
4729 else
4730 {
4731 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4732 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4733 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4734 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4735 }
4736 }
4737 }
4738 // Images present
4739 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4740 {
4741 // Small
4742 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4743 {
4744 // Varying size
4745 if(constantSizesIndex == 0)
4746 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4747 // Constant sizes
4748 else
4749 {
4750 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4751 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4752 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4753 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4754 }
4755 }
4756 // Large
4757 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4758 {
4759 // Varying size
4760 if(constantSizesIndex == 0)
4761 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4762 // Constant sizes
4763 else
4764 {
4765 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4766 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4767 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4768 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4769 }
4770 }
4771 }
4772
4773 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4774 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4775
4776 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4777                    size_t subscriptionModeCount = 2; // Initialized in case the assert(0) in `default` is compiled out.
4778 switch(ConfigType)
4779 {
4780 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4781 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4782 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4783 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4784 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4785 default: assert(0);
4786 }
4787 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4788 {
4789 std::string desc5 = desc4;
4790
4791 switch(subscriptionModeIndex)
4792 {
4793 case 0:
4794 desc5 += " Subscription_66%";
4795 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4796 break;
4797 case 1:
4798 desc5 += " Subscription_133%";
4799 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4800 break;
4801 case 2:
4802 desc5 += " Subscription_100%";
4803 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4804 break;
4805 case 3:
4806 desc5 += " Subscription_33%";
4807 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4808 break;
4809 case 4:
4810 desc5 += " Subscription_166%";
4811 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4812 break;
4813 default:
4814 assert(0);
4815 }
4816
4817 config.TotalItemCount = config.UsedItemCountMax * 5;
4818 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4819
4820 const char* testDescription = desc5.c_str();
4821
4822 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4823 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004824 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004825
4826 PoolTestResult result{};
4827 g_MemoryAliasingWarningEnabled = false;
4828 TestPool_Benchmark(result, config);
4829 g_MemoryAliasingWarningEnabled = true;
4830 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4831 }
4832 }
4833 }
4834 }
4835 }
4836 }
4837}
4838
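// Worked example of the sizing above (illustrative numbers): with AVG_RESOURCES_PER_POOL = 300
// and an average resource size of, say, 512 KB, the pool is roughly 150 MB. In the 133% and 166%
// subscription modes UsedItemCountMax becomes 399 or 498 items, more than the pool can hold at
// once, which is what produces the lost/failed allocation counts written by WritePoolTestResult().
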
Adam Sawickia83793a2018-09-03 13:40:42 +02004839static void BasicTestBuddyAllocator()
4840{
4841 wprintf(L"Basic test buddy allocator\n");
4842
4843 RandomNumberGenerator rand{76543};
4844
4845 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4846 sampleBufCreateInfo.size = 1024; // Whatever.
4847 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4848
4849 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4850 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4851
4852 VmaPoolCreateInfo poolCreateInfo = {};
4853 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004854 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004855
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004856 // Deliberately adding 1023 to test usable size smaller than memory block size.
4857 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004858 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004859 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004860
4861 VmaPool pool = nullptr;
4862 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004863 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004864
4865 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4866
4867 VmaAllocationCreateInfo allocCreateInfo = {};
4868 allocCreateInfo.pool = pool;
4869
4870 std::vector<BufferInfo> bufInfo;
4871 BufferInfo newBufInfo;
4872 VmaAllocationInfo allocInfo;
4873
4874 bufCreateInfo.size = 1024 * 256;
4875 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4876 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004877 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004878 bufInfo.push_back(newBufInfo);
4879
4880 bufCreateInfo.size = 1024 * 512;
4881 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4882 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004883 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004884 bufInfo.push_back(newBufInfo);
4885
4886 bufCreateInfo.size = 1024 * 128;
4887 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4888 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004889 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004890 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004891
4892 // Test very small allocation, smaller than minimum node size.
4893 bufCreateInfo.size = 1;
4894 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4895 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004896 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004897 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004898
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004899 // Test some small allocation with alignment requirement.
4900 {
4901 VkMemoryRequirements memReq;
4902 memReq.alignment = 256;
4903 memReq.memoryTypeBits = UINT32_MAX;
4904 memReq.size = 32;
4905
4906 newBufInfo.Buffer = VK_NULL_HANDLE;
4907 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4908 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004909 TEST(res == VK_SUCCESS);
4910 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004911 bufInfo.push_back(newBufInfo);
4912 }
4913
4914 //SaveAllocatorStatsToFile(L"TEST.json");
4915
Adam Sawicki21017c62018-09-07 15:26:59 +02004916 VmaPoolStats stats = {};
4917 vmaGetPoolStats(g_hAllocator, pool, &stats);
4918 int DBG = 0; // Set breakpoint here to inspect `stats`.
4919
Adam Sawicki80927152018-09-07 17:27:23 +02004920    // Allocate enough new buffers to make sure we spill over into a second block.
4921 for(uint32_t i = 0; i < 32; ++i)
4922 {
4923 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4924 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4925 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004926 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004927 bufInfo.push_back(newBufInfo);
4928 }
4929
4930 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4931
Adam Sawickia83793a2018-09-03 13:40:42 +02004932 // Destroy the buffers in random order.
4933 while(!bufInfo.empty())
4934 {
4935 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4936 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4937 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4938 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4939 }
4940
4941 vmaDestroyPool(g_hAllocator, pool);
4942}
4943
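// A minimal sketch (not part of the tests) of printing the VmaPoolStats fields that the buddy
// allocator test above only inspects under a debugger. Field names follow the VmaPoolStats
// structure of VMA 2.x; the function name is hypothetical.
static void ExamplePrintPoolStats(VmaPool pool)
{
    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    wprintf(L"Pool: blocks=%llu, size=%llu B, unused=%llu B, allocations=%llu, largest free range=%llu B\n",
        (unsigned long long)stats.blockCount,
        (unsigned long long)stats.size,
        (unsigned long long)stats.unusedSize,
        (unsigned long long)stats.allocationCount,
        (unsigned long long)stats.unusedRangeSizeMax);
}
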
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004944static void BasicTestAllocatePages()
4945{
4946 wprintf(L"Basic test allocate pages\n");
4947
4948 RandomNumberGenerator rand{765461};
4949
4950 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4951 sampleBufCreateInfo.size = 1024; // Whatever.
4952 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4953
4954 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4955 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4956
4957 VmaPoolCreateInfo poolCreateInfo = {};
4958 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02004959 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004960
4961 // 1 block of 1 MB.
4962 poolCreateInfo.blockSize = 1024 * 1024;
4963 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
4964
4965 // Create pool.
4966 VmaPool pool = nullptr;
4967 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02004968 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004969
4970 // Make 100 allocations of 4 KB - they should fit into the pool.
4971 VkMemoryRequirements memReq;
4972 memReq.memoryTypeBits = UINT32_MAX;
4973 memReq.alignment = 4 * 1024;
4974 memReq.size = 4 * 1024;
4975
4976 VmaAllocationCreateInfo allocCreateInfo = {};
4977 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4978 allocCreateInfo.pool = pool;
4979
4980 constexpr uint32_t allocCount = 100;
4981
4982 std::vector<VmaAllocation> alloc{allocCount};
4983 std::vector<VmaAllocationInfo> allocInfo{allocCount};
4984 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02004985 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004986 for(uint32_t i = 0; i < allocCount; ++i)
4987 {
Adam Sawickia7d77692018-10-03 16:15:27 +02004988 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004989 allocInfo[i].pMappedData != nullptr &&
4990 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
4991 allocInfo[i].memoryType == allocInfo[0].memoryType);
4992 }
4993
4994 // Free the allocations.
4995 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
4996 std::fill(alloc.begin(), alloc.end(), nullptr);
4997 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
4998
4999 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5000 // Also test optional allocationInfo = null.
5001 memReq.size = 100 * 1024;
5002 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005003 TEST(res != VK_SUCCESS);
5004 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005005
5006 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5007 memReq.size = 4 * 1024;
5008 memReq.alignment = 128 * 1024;
5009 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005010 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005011
5012 // Make 100 dedicated allocations of 4 KB.
5013 memReq.alignment = 4 * 1024;
5014 memReq.size = 4 * 1024;
5015
5016 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5017 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5018 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5019 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005020 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005021 for(uint32_t i = 0; i < allocCount; ++i)
5022 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005023 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005024 allocInfo[i].pMappedData != nullptr &&
5025 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5026 allocInfo[i].offset == 0);
5027 if(i > 0)
5028 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005029 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005030 }
5031 }
5032
5033 // Free the allocations.
5034 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5035 std::fill(alloc.begin(), alloc.end(), nullptr);
5036 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5037
5038 vmaDestroyPool(g_hAllocator, pool);
5039}
5040
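// A minimal sketch (assumption: the allocations were created with VMA_ALLOCATION_CREATE_MAPPED_BIT
// from host-visible memory, as in the test above), showing that the returned pMappedData pointers
// can be written directly. The function name is hypothetical and it is not called anywhere.
static void ExampleFillMappedPages(const VmaAllocationInfo* allocInfo, size_t allocCount, uint8_t value)
{
    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocInfo[i].pMappedData != nullptr)
        {
            uint8_t* const bytes = (uint8_t*)allocInfo[i].pMappedData;
            for(VkDeviceSize b = 0; b < allocInfo[i].size; ++b)
                bytes[b] = value;
        }
    }
}
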
Adam Sawickif2975342018-10-16 13:49:02 +02005041// Test the testing environment.
5042static void TestGpuData()
5043{
5044 RandomNumberGenerator rand = { 53434 };
5045
5046 std::vector<AllocInfo> allocInfo;
5047
5048 for(size_t i = 0; i < 100; ++i)
5049 {
5050 AllocInfo info = {};
5051
5052 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5053 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5054 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5055 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5056 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5057
5058 VmaAllocationCreateInfo allocCreateInfo = {};
5059 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5060
5061 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5062 TEST(res == VK_SUCCESS);
5063
5064 info.m_StartValue = rand.Generate();
5065
5066 allocInfo.push_back(std::move(info));
5067 }
5068
5069 UploadGpuData(allocInfo.data(), allocInfo.size());
5070
5071 ValidateGpuData(allocInfo.data(), allocInfo.size());
5072
5073 DestroyAllAllocations(allocInfo);
5074}
5075
Adam Sawickib8333fb2018-03-13 16:15:53 +01005076void Test()
5077{
5078 wprintf(L"TESTING:\n");
5079
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005080 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005081 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005082 ////////////////////////////////////////////////////////////////////////////////
5083 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005084 return;
5085 }
5086
Adam Sawickib8333fb2018-03-13 16:15:53 +01005087 // # Simple tests
5088
5089 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005090 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005091#if VMA_DEBUG_MARGIN
5092 TestDebugMargin();
5093#else
5094 TestPool_SameSize();
5095 TestHeapSizeLimit();
5096#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005097#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5098 TestAllocationsInitialization();
5099#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005100 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02005101 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005102 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005103 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005104 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005105 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005106
Adam Sawicki4338f662018-09-07 14:12:37 +02005107 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005108 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005109
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005110 {
5111 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005112 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005113        TEST(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005114 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005115 fclose(file);
5116 }
5117
Adam Sawickib8333fb2018-03-13 16:15:53 +01005118 TestDefragmentationSimple();
5119 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005120 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005121 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005122
5123 // # Detailed tests
5124 FILE* file;
5125 fopen_s(&file, "Results.csv", "w");
5126 assert(file != NULL);
5127    TEST(file != NULL);
5128 WriteMainTestResultHeader(file);
5129 PerformMainTests(file);
5130 //PerformCustomMainTest(file);
5131
5132 WritePoolTestResultHeader(file);
5133 PerformPoolTests(file);
5134 //PerformCustomPoolTest(file);
5135
5136 fclose(file);
5137
5138 wprintf(L"Done.\n");
5139}
5140
Adam Sawickif1a793c2018-03-13 15:42:22 +01005141#endif // #ifdef _WIN32