blob: 348102800b4adaba260ddb0d4fe7cbdba2cf2cbd [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
2// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
3//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <algorithm>
#include <atomic>
#include <functional>
#include <mutex>
#include <thread>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
31#ifdef _WIN32
32
Adam Sawicki33d2ce72018-08-27 13:59:13 +020033static const char* CODE_DESCRIPTION = "Foo";
34
Adam Sawickif2975342018-10-16 13:49:02 +020035extern VkCommandBuffer g_hTemporaryCommandBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +020036extern const VkAllocationCallbacks* g_Allocs;
Adam Sawickif2975342018-10-16 13:49:02 +020037void BeginSingleTimeCommands();
38void EndSingleTimeCommands();
39
Adam Sawickibdb89a92018-12-13 11:56:30 +010040#ifndef VMA_DEBUG_MARGIN
41 #define VMA_DEBUG_MARGIN 0
42#endif
43
// Overall "size" of a test run: selects how exhaustive the tested parameter
// combinations are (e.g. how many allocation strategies are exercised —
// see GetAllocationStrategyCount).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT // Number of valid values; not itself a valid config.
};
52
Adam Sawickif2975342018-10-16 13:49:02 +020053static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
54//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020055
// Order in which remaining allocations are freed at the end of a test run.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Printable names for FREE_ORDER, indexed by the enumerator's value.
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
63
Adam Sawicki80927152018-09-07 17:27:23 +020064// Copy of internal VmaAlgorithmToStr.
65static const char* AlgorithmToStr(uint32_t algorithm)
66{
67 switch(algorithm)
68 {
69 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
70 return "Linear";
71 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
72 return "Buddy";
73 case 0:
74 return "Default";
75 default:
76 assert(0);
77 return "";
78 }
79}
80
// One weighted size class for randomized resources. An entry describes
// either buffers or images, never both: BufferSizeMax > 0 means "buffer",
// otherwise the Image* range is used (see GetNextAllocationSize in MainTest).
struct AllocationSize
{
    uint32_t Probability; // Relative weight among all AllocationSize entries.
    VkDeviceSize BufferSizeMin, BufferSizeMax; // Buffer size range in bytes; 0 for image entries.
    uint32_t ImageSizeMin, ImageSizeMax; // Image width/height range in pixels; 0 for buffer entries.
};
87
// Parameters for MainTest(): how much to allocate, from how many threads,
// and with which distribution of sizes and memory usages.
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate; // Bytes allocated up-front before the random alloc/free phase.
    uint32_t AdditionalOperationCount; // Random alloc/free operations performed after the initial fill.
    VkDeviceSize MaxBytesToAllocate; // Upper bound on bytes held during the additional-operation phase.
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes; // Weighted size classes.
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent; // % chance an allocation goes to the shared cross-thread list.
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
101
// Aggregate output of MainTest(): timings plus allocator memory statistics.
struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated; // usedBytes + unusedBytes from vmaCalculateStats.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax; // Unused-range stats from vmaCalculateStats.
};
110
111void TestDefragmentationSimple();
112void TestDefragmentationFull();
113
114struct PoolTestConfig
115{
116 uint32_t RandSeed;
117 uint32_t ThreadCount;
118 VkDeviceSize PoolSize;
119 uint32_t FrameCount;
120 uint32_t TotalItemCount;
121 // Range for number of items used in each frame.
122 uint32_t UsedItemCountMin, UsedItemCountMax;
123 // Percent of items to make unused, and possibly make some others used in each frame.
124 uint32_t ItemsToMakeUnusedPercent;
125 std::vector<AllocationSize> AllocationSizes;
126
127 VkDeviceSize CalcAvgResourceSize() const
128 {
129 uint32_t probabilitySum = 0;
130 VkDeviceSize sizeSum = 0;
131 for(size_t i = 0; i < AllocationSizes.size(); ++i)
132 {
133 const AllocationSize& allocSize = AllocationSizes[i];
134 if(allocSize.BufferSizeMax > 0)
135 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
136 else
137 {
138 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
139 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
140 }
141 probabilitySum += allocSize.Probability;
142 }
143 return sizeSum / probabilitySum;
144 }
145
146 bool UsesBuffers() const
147 {
148 for(size_t i = 0; i < AllocationSizes.size(); ++i)
149 if(AllocationSizes[i].BufferSizeMax > 0)
150 return true;
151 return false;
152 }
153
154 bool UsesImages() const
155 {
156 for(size_t i = 0; i < AllocationSizes.size(); ++i)
157 if(AllocationSizes[i].ImageSizeMax > 0)
158 return true;
159 return false;
160 }
161};
162
// Aggregate results of the pool test, merged across all worker threads.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    // Allocations that became "lost" — presumably VMA's lost-allocation
    // mechanism; confirm against the pool test implementation.
    size_t LostAllocationCount, LostAllocationTotalSize;
    // Allocations that failed outright.
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
171
172static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
173
Adam Sawicki51fa9662018-10-03 13:44:29 +0200174uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200175
// A buffer handle paired with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
181
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200182static uint32_t GetAllocationStrategyCount()
183{
184 uint32_t strategyCount = 0;
185 switch(ConfigType)
186 {
187 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
188 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
189 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
190 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
191 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
192 default: assert(0);
193 }
194 return strategyCount;
195}
196
197static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
198{
199 switch(allocStrategy)
200 {
201 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
202 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
203 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
204 case 0: return "Default"; break;
205 default: assert(0); return "";
206 }
207}
208
Adam Sawickib8333fb2018-03-13 16:15:53 +0100209static void InitResult(Result& outResult)
210{
211 outResult.TotalTime = duration::zero();
212 outResult.AllocationTimeMin = duration::max();
213 outResult.AllocationTimeAvg = duration::zero();
214 outResult.AllocationTimeMax = duration::min();
215 outResult.DeallocationTimeMin = duration::max();
216 outResult.DeallocationTimeAvg = duration::zero();
217 outResult.DeallocationTimeMax = duration::min();
218 outResult.TotalMemoryAllocated = 0;
219 outResult.FreeRangeSizeAvg = 0;
220 outResult.FreeRangeSizeMax = 0;
221}
222
223class TimeRegisterObj
224{
225public:
226 TimeRegisterObj(duration& min, duration& sum, duration& max) :
227 m_Min(min),
228 m_Sum(sum),
229 m_Max(max),
230 m_TimeBeg(std::chrono::high_resolution_clock::now())
231 {
232 }
233
234 ~TimeRegisterObj()
235 {
236 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
237 m_Sum += d;
238 if(d < m_Min) m_Min = d;
239 if(d > m_Max) m_Max = d;
240 }
241
242private:
243 duration& m_Min;
244 duration& m_Sum;
245 duration& m_Max;
246 time_point m_TimeBeg;
247};
248
// Per-thread counters of the pool test, merged into PoolTestResult later.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
257
// Scoped timer that accumulates into the allocation-time fields of Result.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
266
// Scoped timer that accumulates into the deallocation-time fields of Result.
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
275
// Scoped timer that accumulates into the allocation-time fields of
// PoolTestThreadResult.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
284
// Scoped timer that accumulates into the deallocation-time fields of
// PoolTestThreadResult.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
293
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200294static void CurrentTimeToStr(std::string& out)
295{
296 time_t rawTime; time(&rawTime);
297 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
298 char timeStr[128];
299 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
300 out = timeStr;
301}
302
// Runs the multithreaded allocation stress test described by `config` and
// fills `outResult` with timing and memory statistics.
//
// Each of config.ThreadCount threads:
//   1. Allocates random buffers/images until its share of
//      config.BeginBytesToAllocate is reached.
//   2. Performs its share of config.AdditionalOperationCount random
//      allocate/free operations, staying below its share of
//      config.MaxBytesToAllocate.
//   3. Waits on a manual-reset Win32 event (signaled after the main thread
//      captures allocator statistics), then frees its remaining allocations
//      in config.FreeOrder order.
// Some allocations go to a shared, mutex-protected `commonAllocations` list
// instead of the per-thread list, with probability
// config.ThreadsUsingCommonAllocationsProbabilityPercent.
// Returns the last VkResult produced by an allocation call.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    // NOTE(review): `res` is captured by reference and written concurrently
    // by worker threads through the Allocate lambda without synchronization
    // — technically a data race; confirm whether that is acceptable here.
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // One allocated resource: exactly one of Buffer/Image is non-null.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads; guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent non-zero)
    // with a randomly chosen VMA_MEMORY_USAGE_*, records allocation timing
    // into outResult, and appends the result to either `allocations` or the
    // shared `commonAllocations` list.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer/image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted random choice of memory usage per config.MemUsageProbability.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images use optimal tiling; all other usages get linear.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Pick usage flags consistent with the chosen memory usage.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    // Randomly chooses a size class per config.AllocationSizes and outputs
    // either a buffer size (outBufSize > 0) or an image extent, never both.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        // Weighted random choice of size class.
        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                // Round down to a multiple of 16 bytes.
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event, initially unsignaled: released after the main
    // thread has captured memory statistics, so all threads start the
    // deallocation phase together.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free one randomly chosen allocation — either from the
                // shared list (under the mutex) or this thread's own list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Only free if it keeps this thread's byte counter non-negative.
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Block until the main thread has captured allocator statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared by all threads here
                // without synchronization — confirm that race is acceptable.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Turn accumulated duration sums into averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
669
// Builds the allocator's JSON statistics string and writes it to `filePath`,
// printing the destination path first.
void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    // VK_TRUE = request the detailed per-allocation map in the dump.
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}
678
// One test resource: a buffer OR an image together with its VMA allocation
// and the create-info it was made from. m_StartValue seeds the deterministic
// data pattern written by UploadGpuData and checked by ValidateGpuData.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    // Only one member is meaningful, matching whichever of m_Buffer/m_Image
    // is set.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};
696
// Creates the buffer plus its memory allocation and remembers the creation
// info in m_BufferInfo. Fails the test on any Vulkan error.
void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
705
706void AllocInfo::Destroy()
707{
708 if(m_Image)
709 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200710 vkDestroyImage(g_hDevice, m_Image, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200711 }
712 if(m_Buffer)
713 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200714 vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200715 }
716 if(m_Allocation)
717 {
718 vmaFreeMemory(g_hAllocator, m_Allocation);
719 }
720}
721
// Pool of persistently-mapped CPU-side staging buffers reused across
// upload/download batches. The total size of all buffers (used + unused)
// is capped at MAX_TOTAL_SIZE.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr; // Persistent mapping (VMA_ALLOCATION_CREATE_MAPPED_BIT).
        bool Used = false; // Currently acquired; cleared by ReleaseAllBuffers().
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
745
746StagingBufferCollection::~StagingBufferCollection()
747{
748 for(size_t i = m_Bufs.size(); i--; )
749 {
750 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
751 }
752}
753
// Hands out an unused staging buffer of at least `size` bytes. Strategy:
//   1. Reuse the smallest existing unused buffer that fits.
//   2. Otherwise create a new mapped buffer if it stays under MAX_TOTAL_SIZE.
//   3. Otherwise destroy all unused (too-small) buffers and retry.
// Returns false when nothing can be provided without exceeding the cap,
// i.e. the remaining capacity is held by buffers still in use.
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // Host-visible memory, persistently mapped for the buffer's lifetime.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Iterate backwards so erase() doesn't shift not-yet-visited entries.
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Recursion terminates: on the retry either the new buffer fits or
        // no unused buffers remain and we fall through to `return false`.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
835
836void StagingBufferCollection::ReleaseAllBuffers()
837{
838 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
839 {
840 m_Bufs[i].Used = false;
841 }
842}
843
// Writes the deterministic test pattern (m_StartValue, m_StartValue+1, ...)
// into every buffer of `allocInfo` via staging buffers and vkCmdCopyBuffer.
// Copies are batched into as few command-buffer submissions as the staging
// pool's capacity allows. Image entries fail the test (not implemented).
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging pool exhausted: submit the pending batch, recycle
                // the staging buffers, then retry (must succeed now).
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit whatever remains in the final batch.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
907
// Reads every buffer of `allocInfo` back through staging buffers and checks
// that it still holds the pattern written by UploadGpuData
// (m_StartValue, m_StartValue+1, ...). Copies are batched; whenever the
// staging pool runs out, the pending batch is submitted and validated
// before the staging buffers are recycled. Image entries fail the test.
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    // Index of the first allocation covered by the current batch.
    size_t validateAllocIndexOffset = 0;
    // Mapped staging pointers of the current batch, in allocation order.
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging pool exhausted: submit the pending copies, then
                // validate the contents of every staging buffer in the batch.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                // Start a new batch at the current allocation.
                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit and validate the final batch.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}
1014
Adam Sawickib8333fb2018-03-13 16:15:53 +01001015static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1016{
1017 outMemReq = {};
1018 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1019 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1020}
1021
1022static void CreateBuffer(
1023 VmaPool pool,
1024 const VkBufferCreateInfo& bufCreateInfo,
1025 bool persistentlyMapped,
1026 AllocInfo& outAllocInfo)
1027{
1028 outAllocInfo = {};
1029 outAllocInfo.m_BufferInfo = bufCreateInfo;
1030
1031 VmaAllocationCreateInfo allocCreateInfo = {};
1032 allocCreateInfo.pool = pool;
1033 if(persistentlyMapped)
1034 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1035
1036 VmaAllocationInfo vmaAllocInfo = {};
1037 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1038
1039 // Setup StartValue and fill.
1040 {
1041 outAllocInfo.m_StartValue = (uint32_t)rand();
1042 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001043 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001044 if(!persistentlyMapped)
1045 {
1046 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1047 }
1048
1049 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001050 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001051 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1052 data[i] = value++;
1053
1054 if(!persistentlyMapped)
1055 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1056 }
1057}
1058
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001059static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001060{
1061 outAllocation.m_Allocation = nullptr;
1062 outAllocation.m_Buffer = nullptr;
1063 outAllocation.m_Image = nullptr;
1064 outAllocation.m_StartValue = (uint32_t)rand();
1065
1066 VmaAllocationCreateInfo vmaMemReq;
1067 GetMemReq(vmaMemReq);
1068
1069 VmaAllocationInfo allocInfo;
1070
1071 const bool isBuffer = true;//(rand() & 0x1) != 0;
1072 const bool isLarge = (rand() % 16) == 0;
1073 if(isBuffer)
1074 {
1075 const uint32_t bufferSize = isLarge ?
1076 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1077 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1078
1079 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1080 bufferInfo.size = bufferSize;
1081 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1082
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001083 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001084 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001085 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001086 }
1087 else
1088 {
1089 const uint32_t imageSizeX = isLarge ?
1090 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1091 rand() % 1024 + 1; // 1 ... 1024
1092 const uint32_t imageSizeY = isLarge ?
1093 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1094 rand() % 1024 + 1; // 1 ... 1024
1095
1096 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1097 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1098 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1099 imageInfo.extent.width = imageSizeX;
1100 imageInfo.extent.height = imageSizeY;
1101 imageInfo.extent.depth = 1;
1102 imageInfo.mipLevels = 1;
1103 imageInfo.arrayLayers = 1;
1104 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1105 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1106 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1107 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1108
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001109 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001110 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001111 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001112 }
1113
1114 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1115 if(allocInfo.pMappedData == nullptr)
1116 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001117 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001118 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001119 }
1120
1121 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001122 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001123 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1124 data[i] = value++;
1125
1126 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001127 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001128}
1129
1130static void DestroyAllocation(const AllocInfo& allocation)
1131{
1132 if(allocation.m_Buffer)
1133 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1134 else
1135 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1136}
1137
1138static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1139{
1140 for(size_t i = allocations.size(); i--; )
1141 DestroyAllocation(allocations[i]);
1142 allocations.clear();
1143}
1144
1145static void ValidateAllocationData(const AllocInfo& allocation)
1146{
1147 VmaAllocationInfo allocInfo;
1148 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1149
1150 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1151 if(allocInfo.pMappedData == nullptr)
1152 {
1153 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001154 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001155 }
1156
1157 uint32_t value = allocation.m_StartValue;
1158 bool ok = true;
1159 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001160 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001161 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1162 {
1163 if(data[i] != value++)
1164 {
1165 ok = false;
1166 break;
1167 }
1168 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001169 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001170
1171 if(allocInfo.pMappedData == nullptr)
1172 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1173}
1174
// Destroys and recreates the Vulkan buffer/image of this allocation and binds
// the fresh handle to the (possibly moved) memory behind m_Allocation.
// Called after defragmentation for every allocation reported as changed.
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);

        // Recreate from the original creation parameters saved in AllocInfo.
        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        // Bind the new buffer to the allocation's current memory location.
        res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
        TEST(res == VK_SUCCESS);
    }
}
1210
1211static void Defragment(AllocInfo* allocs, size_t allocCount,
1212 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1213 VmaDefragmentationStats* defragmentationStats = nullptr)
1214{
1215 std::vector<VmaAllocation> vmaAllocs(allocCount);
1216 for(size_t i = 0; i < allocCount; ++i)
1217 vmaAllocs[i] = allocs[i].m_Allocation;
1218
1219 std::vector<VkBool32> allocChanged(allocCount);
1220
1221 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1222 defragmentationInfo, defragmentationStats) );
1223
1224 for(size_t i = 0; i < allocCount; ++i)
1225 {
1226 if(allocChanged[i])
1227 {
1228 RecreateAllocationResource(allocs[i]);
1229 }
1230 }
1231}
1232
1233static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1234{
1235 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1236 ValidateAllocationData(allocInfo);
1237 });
1238}
1239
1240void TestDefragmentationSimple()
1241{
1242 wprintf(L"Test defragmentation simple\n");
1243
1244 RandomNumberGenerator rand(667);
1245
1246 const VkDeviceSize BUF_SIZE = 0x10000;
1247 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1248
1249 const VkDeviceSize MIN_BUF_SIZE = 32;
1250 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1251 auto RandomBufSize = [&]() -> VkDeviceSize {
1252 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1253 };
1254
1255 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1256 bufCreateInfo.size = BUF_SIZE;
1257 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1258
1259 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1260 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1261
1262 uint32_t memTypeIndex = UINT32_MAX;
1263 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1264
1265 VmaPoolCreateInfo poolCreateInfo = {};
1266 poolCreateInfo.blockSize = BLOCK_SIZE;
1267 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1268
1269 VmaPool pool;
1270 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1271
Adam Sawickie1681912018-11-23 17:50:12 +01001272 // Defragmentation of empty pool.
1273 {
1274 VmaDefragmentationInfo2 defragInfo = {};
1275 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1276 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1277 defragInfo.poolCount = 1;
1278 defragInfo.pPools = &pool;
1279
1280 VmaDefragmentationStats defragStats = {};
1281 VmaDefragmentationContext defragCtx = nullptr;
1282 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1283 TEST(res >= VK_SUCCESS);
1284 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1285 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1286 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1287 }
1288
Adam Sawickib8333fb2018-03-13 16:15:53 +01001289 std::vector<AllocInfo> allocations;
1290
1291 // persistentlyMappedOption = 0 - not persistently mapped.
1292 // persistentlyMappedOption = 1 - persistently mapped.
1293 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1294 {
1295 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1296 const bool persistentlyMapped = persistentlyMappedOption != 0;
1297
1298 // # Test 1
1299 // Buffers of fixed size.
1300 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1301 // Expected result: at least 1 block freed.
1302 {
1303 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1304 {
1305 AllocInfo allocInfo;
1306 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1307 allocations.push_back(allocInfo);
1308 }
1309
1310 for(size_t i = 1; i < allocations.size(); ++i)
1311 {
1312 DestroyAllocation(allocations[i]);
1313 allocations.erase(allocations.begin() + i);
1314 }
1315
1316 VmaDefragmentationStats defragStats;
1317 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001318 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1319 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001320
1321 ValidateAllocationsData(allocations.data(), allocations.size());
1322
1323 DestroyAllAllocations(allocations);
1324 }
1325
1326 // # Test 2
1327 // Buffers of fixed size.
1328 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1329 // Expected result: Each of 4 interations makes some progress.
1330 {
1331 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1332 {
1333 AllocInfo allocInfo;
1334 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1335 allocations.push_back(allocInfo);
1336 }
1337
1338 for(size_t i = 1; i < allocations.size(); ++i)
1339 {
1340 DestroyAllocation(allocations[i]);
1341 allocations.erase(allocations.begin() + i);
1342 }
1343
1344 VmaDefragmentationInfo defragInfo = {};
1345 defragInfo.maxAllocationsToMove = 1;
1346 defragInfo.maxBytesToMove = BUF_SIZE;
1347
1348 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1349 {
1350 VmaDefragmentationStats defragStats;
1351 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001352 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001353 }
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359
1360 // # Test 3
1361 // Buffers of variable size.
1362 // Create a number of buffers. Remove some percent of them.
1363 // Defragment while having some percent of them unmovable.
1364 // Expected result: Just simple validation.
1365 {
1366 for(size_t i = 0; i < 100; ++i)
1367 {
1368 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1369 localBufCreateInfo.size = RandomBufSize();
1370
1371 AllocInfo allocInfo;
1372 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1373 allocations.push_back(allocInfo);
1374 }
1375
1376 const uint32_t percentToDelete = 60;
1377 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1378 for(size_t i = 0; i < numberToDelete; ++i)
1379 {
1380 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1381 DestroyAllocation(allocations[indexToDelete]);
1382 allocations.erase(allocations.begin() + indexToDelete);
1383 }
1384
1385 // Non-movable allocations will be at the beginning of allocations array.
1386 const uint32_t percentNonMovable = 20;
1387 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1388 for(size_t i = 0; i < numberNonMovable; ++i)
1389 {
1390 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1391 if(indexNonMovable != i)
1392 std::swap(allocations[i], allocations[indexNonMovable]);
1393 }
1394
1395 VmaDefragmentationStats defragStats;
1396 Defragment(
1397 allocations.data() + numberNonMovable,
1398 allocations.size() - numberNonMovable,
1399 nullptr, &defragStats);
1400
1401 ValidateAllocationsData(allocations.data(), allocations.size());
1402
1403 DestroyAllAllocations(allocations);
1404 }
1405 }
1406
Adam Sawicki647cf242018-11-23 17:58:00 +01001407 /*
1408 Allocation that must be move to an overlapping place using memmove().
1409 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1410 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001411 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001412 {
1413 AllocInfo allocInfo[2];
1414
1415 bufCreateInfo.size = BUF_SIZE;
1416 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1417 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1418 bufCreateInfo.size = biggerBufSize;
1419 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1420
1421 DestroyAllocation(allocInfo[0]);
1422
1423 VmaDefragmentationStats defragStats;
1424 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1425 // If this fails, it means we couldn't do memmove with overlapping regions.
1426 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1427
1428 ValidateAllocationsData(&allocInfo[1], 1);
1429 DestroyAllocation(allocInfo[1]);
1430 }
1431
Adam Sawickib8333fb2018-03-13 16:15:53 +01001432 vmaDestroyPool(g_hAllocator, pool);
1433}
1434
// Checks that defragmenting by passing the whole pool (pPools) produces the
// same statistics as passing an explicit list of all its allocations.
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    // caseIndex 0: scope given as the pool itself.
    // caseIndex 1: scope given as an explicit list of all its allocations.
    // Both cases recreate the same fragmentation layout from scratch.
    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    // Both ways of specifying the scope must behave identically.
    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}
1521
// Stress test: creates many random allocations in default pools, deletes most
// of them at random, runs vmaDefragment() over the remainder, recreates moved
// resources, and validates their fill pattern afterwards.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // NOTE(review): nonMovablePercent is currently 0 so this loop is a
        // no-op. If it is ever raised, the random erase below makes
        // vmaAllocations indices diverge from allocations indices, so
        // allocationsChanged[i] would no longer correspond to allocations[i]
        // in the recreate loop further down — verify before enabling.
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits on the amount of data moved.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            // Recreate and rebind the resources of allocations that moved.
            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1605
// Tests GPU-side defragmentation: vmaDefragmentationBegin() records move
// commands into a command buffer which is then submitted. Fills GPU-only
// memory with buffers, frees most of them, defragments only the allocations
// marked movable (pUserData == 1), and validates contents via staging readback.
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        if(rand.Generate() % 100 < percentNonMovable)
        {
            // pUserData == 2 marks this allocation as non-movable for the test.
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see different color in output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        // Gather only the movable allocations (pUserData == 1), remembering
        // each one's original index so moved ones can be recreated afterwards.
        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        std::vector<size_t> allocationOriginalIndex;

        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        // Record move commands into the temporary command buffer, submit it,
        // then finish the defragmentation.
        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = 0;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        for(size_t i = 0; i < movableAllocCount; ++i)
        {
            if(allocationChanged[i])
            {
                const size_t origAllocIndex = allocationOriginalIndex[i];
                RecreateAllocationResource(allocations[origAllocIndex]);
            }
        }

        // If corruption detection is enabled, GPU defragmentation may not work on
        // memory types that have this detection active, e.g. on Intel.
        #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
        TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
        TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        #endif
    }

    ValidateGpuData(allocations.data(), allocations.size());

    swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
    SaveAllocatorStatsToFile(fileName);

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}
1746
Adam Sawickib8333fb2018-03-13 16:15:53 +01001747static void TestUserData()
1748{
1749 VkResult res;
1750
1751 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1752 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1753 bufCreateInfo.size = 0x10000;
1754
1755 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1756 {
1757 // Opaque pointer
1758 {
1759
1760 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1761 void* pointerToSomething = &res;
1762
1763 VmaAllocationCreateInfo allocCreateInfo = {};
1764 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1765 allocCreateInfo.pUserData = numberAsPointer;
1766 if(testIndex == 1)
1767 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1768
1769 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1770 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001771 TEST(res == VK_SUCCESS);
1772 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001775 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001776
1777 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1778 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001779 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1782 }
1783
1784 // String
1785 {
1786 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1787 const char* name2 = "2";
1788 const size_t name1Len = strlen(name1);
1789
1790 char* name1Buf = new char[name1Len + 1];
1791 strcpy_s(name1Buf, name1Len + 1, name1);
1792
1793 VmaAllocationCreateInfo allocCreateInfo = {};
1794 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1795 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1796 allocCreateInfo.pUserData = name1Buf;
1797 if(testIndex == 1)
1798 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1799
1800 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001802 TEST(res == VK_SUCCESS);
1803 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1804 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001805
1806 delete[] name1Buf;
1807
1808 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001809 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001810
1811 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1812 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001813 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001814
1815 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1816 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001817 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001818
1819 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1820 }
1821 }
1822}
1823
Adam Sawicki370ab182018-11-08 16:31:00 +01001824static void TestInvalidAllocations()
1825{
1826 VkResult res;
1827
1828 VmaAllocationCreateInfo allocCreateInfo = {};
1829 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1830
1831 // Try to allocate 0 bytes.
1832 {
1833 VkMemoryRequirements memReq = {};
1834 memReq.size = 0; // !!!
1835 memReq.alignment = 4;
1836 memReq.memoryTypeBits = UINT32_MAX;
1837 VmaAllocation alloc = VK_NULL_HANDLE;
1838 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1839 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1840 }
1841
1842 // Try to create buffer with size = 0.
1843 {
1844 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1845 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1846 bufCreateInfo.size = 0; // !!!
1847 VkBuffer buf = VK_NULL_HANDLE;
1848 VmaAllocation alloc = VK_NULL_HANDLE;
1849 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1850 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1851 }
1852
1853 // Try to create image with one dimension = 0.
1854 {
1855 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1856 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1857 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1858 imageCreateInfo.extent.width = 128;
1859 imageCreateInfo.extent.height = 0; // !!!
1860 imageCreateInfo.extent.depth = 1;
1861 imageCreateInfo.mipLevels = 1;
1862 imageCreateInfo.arrayLayers = 1;
1863 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1864 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1865 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1866 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1867 VkImage image = VK_NULL_HANDLE;
1868 VmaAllocation alloc = VK_NULL_HANDLE;
1869 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1870 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1871 }
1872}
1873
Adam Sawickib8333fb2018-03-13 16:15:53 +01001874static void TestMemoryRequirements()
1875{
1876 VkResult res;
1877 VkBuffer buf;
1878 VmaAllocation alloc;
1879 VmaAllocationInfo allocInfo;
1880
1881 const VkPhysicalDeviceMemoryProperties* memProps;
1882 vmaGetMemoryProperties(g_hAllocator, &memProps);
1883
1884 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1885 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1886 bufInfo.size = 128;
1887
1888 VmaAllocationCreateInfo allocCreateInfo = {};
1889
1890 // No requirements.
1891 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001892 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001893 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1894
1895 // Usage.
1896 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1897 allocCreateInfo.requiredFlags = 0;
1898 allocCreateInfo.preferredFlags = 0;
1899 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1900
1901 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001902 TEST(res == VK_SUCCESS);
1903 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001904 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1905
1906 // Required flags, preferred flags.
1907 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1908 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1909 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1910 allocCreateInfo.memoryTypeBits = 0;
1911
1912 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001913 TEST(res == VK_SUCCESS);
1914 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1915 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001916 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1917
1918 // memoryTypeBits.
1919 const uint32_t memType = allocInfo.memoryType;
1920 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1921 allocCreateInfo.requiredFlags = 0;
1922 allocCreateInfo.preferredFlags = 0;
1923 allocCreateInfo.memoryTypeBits = 1u << memType;
1924
1925 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001926 TEST(res == VK_SUCCESS);
1927 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001928 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1929
1930}
1931
1932static void TestBasics()
1933{
1934 VkResult res;
1935
1936 TestMemoryRequirements();
1937
1938 // Lost allocation
1939 {
1940 VmaAllocation alloc = VK_NULL_HANDLE;
1941 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001942 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001943
1944 VmaAllocationInfo allocInfo;
1945 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001946 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1947 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001948
1949 vmaFreeMemory(g_hAllocator, alloc);
1950 }
1951
1952 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1953 {
1954 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1955 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1956 bufCreateInfo.size = 128;
1957
1958 VmaAllocationCreateInfo allocCreateInfo = {};
1959 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1960 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1961
1962 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1963 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001964 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001965
1966 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1967
1968 // Same with OWN_MEMORY.
1969 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1970
1971 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001972 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001973
1974 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1975 }
1976
1977 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001978
1979 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980}
1981
// Verifies VmaAllocatorCreateInfo::pHeapSizeLimit: builds a dedicated
// allocator whose every heap is artificially capped at HEAP_SIZE_LIMIT,
// fills exactly that budget with allocations, then checks that one more
// allocation fails with VK_ERROR_OUT_OF_DEVICE_MEMORY.
void TestHeapSizeLimit()
{
    const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
    const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB

    // Apply the same limit to every potential heap index.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = HEAP_SIZE_LIMIT;
    }

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    // Separate allocator so the limit does not affect other tests.
    // NOTE(review): pAllocationCallbacks (g_Allocs) is not set here, unlike
    // some other allocator setups in this file - confirm that is intentional.
    VmaAllocator hAllocator;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    // Buffer + allocation pairs created below; all destroyed during teardown.
    struct Item
    {
        VkBuffer hBuf;
        VmaAllocation hAlloc;
    };
    std::vector<Item> items;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
    // ownAllocInfo is kept to learn which memory type was used.
    VmaAllocationInfo ownAllocInfo;
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        for(size_t i = 0; i < 2; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // Create pool to make sure allocations must be out of this memory type.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
    poolCreateInfo.blockSize = BLOCK_SIZE;

    VmaPool hPool;
    res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
    TEST(res == VK_SUCCESS);

    // 2. Allocate normal buffers from all the remaining memory.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        // Two half-block buffers per pool block; one block's worth of budget
        // is already consumed by the dedicated allocations from step 1.
        const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
        for(size_t i = 0; i < bufCount; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // 3. Allocation of one more (even small) buffer should fail.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = 128;

        VkBuffer hBuf;
        VmaAllocation hAlloc;
        res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
        TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }

    // Destroy everything.
    for(size_t i = items.size(); i--; )
    {
        vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
    }

    vmaDestroyPool(hAllocator, hPool);

    vmaDestroyAllocator(hAllocator);
}
2079
Adam Sawicki212a4a62018-06-14 15:44:45 +02002080#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002081static void TestDebugMargin()
2082{
2083 if(VMA_DEBUG_MARGIN == 0)
2084 {
2085 return;
2086 }
2087
2088 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002089 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002090
2091 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002092 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002093
2094 // Create few buffers of different size.
2095 const size_t BUF_COUNT = 10;
2096 BufferInfo buffers[BUF_COUNT];
2097 VmaAllocationInfo allocInfo[BUF_COUNT];
2098 for(size_t i = 0; i < 10; ++i)
2099 {
2100 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101 // Last one will be mapped.
2102 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002103
2104 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002106 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002107 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002108
2109 if(i == BUF_COUNT - 1)
2110 {
2111 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002112 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002113 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2114 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2115 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002116 }
2117
2118 // Check if their offsets preserve margin between them.
2119 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2120 {
2121 if(lhs.deviceMemory != rhs.deviceMemory)
2122 {
2123 return lhs.deviceMemory < rhs.deviceMemory;
2124 }
2125 return lhs.offset < rhs.offset;
2126 });
2127 for(size_t i = 1; i < BUF_COUNT; ++i)
2128 {
2129 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2130 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002131 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002132 }
2133 }
2134
Adam Sawicki212a4a62018-06-14 15:44:45 +02002135 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002136 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002137
Adam Sawicki73b16652018-06-11 16:39:25 +02002138 // Destroy all buffers.
2139 for(size_t i = BUF_COUNT; i--; )
2140 {
2141 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2142 }
2143}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002144#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002145
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002146static void TestLinearAllocator()
2147{
2148 wprintf(L"Test linear allocator\n");
2149
2150 RandomNumberGenerator rand{645332};
2151
2152 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2153 sampleBufCreateInfo.size = 1024; // Whatever.
2154 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2155
2156 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2157 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2158
2159 VmaPoolCreateInfo poolCreateInfo = {};
2160 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002161 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002162
Adam Sawickiee082772018-06-20 17:45:49 +02002163 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002164 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2165 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2166
2167 VmaPool pool = nullptr;
2168 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002169 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002170
2171 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2172
2173 VmaAllocationCreateInfo allocCreateInfo = {};
2174 allocCreateInfo.pool = pool;
2175
2176 constexpr size_t maxBufCount = 100;
2177 std::vector<BufferInfo> bufInfo;
2178
2179 constexpr VkDeviceSize bufSizeMin = 16;
2180 constexpr VkDeviceSize bufSizeMax = 1024;
2181 VmaAllocationInfo allocInfo;
2182 VkDeviceSize prevOffset = 0;
2183
2184 // Test one-time free.
2185 for(size_t i = 0; i < 2; ++i)
2186 {
2187 // Allocate number of buffers of varying size that surely fit into this block.
2188 VkDeviceSize bufSumSize = 0;
2189 for(size_t i = 0; i < maxBufCount; ++i)
2190 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002191 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002192 BufferInfo newBufInfo;
2193 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2194 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002195 TEST(res == VK_SUCCESS);
2196 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002197 bufInfo.push_back(newBufInfo);
2198 prevOffset = allocInfo.offset;
2199 bufSumSize += bufCreateInfo.size;
2200 }
2201
2202 // Validate pool stats.
2203 VmaPoolStats stats;
2204 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002205 TEST(stats.size == poolCreateInfo.blockSize);
2206 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2207 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002208
2209 // Destroy the buffers in random order.
2210 while(!bufInfo.empty())
2211 {
2212 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2213 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2214 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2215 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2216 }
2217 }
2218
2219 // Test stack.
2220 {
2221 // Allocate number of buffers of varying size that surely fit into this block.
2222 for(size_t i = 0; i < maxBufCount; ++i)
2223 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002224 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002225 BufferInfo newBufInfo;
2226 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2227 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002228 TEST(res == VK_SUCCESS);
2229 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002230 bufInfo.push_back(newBufInfo);
2231 prevOffset = allocInfo.offset;
2232 }
2233
2234 // Destroy few buffers from top of the stack.
2235 for(size_t i = 0; i < maxBufCount / 5; ++i)
2236 {
2237 const BufferInfo& currBufInfo = bufInfo.back();
2238 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2239 bufInfo.pop_back();
2240 }
2241
2242 // Create some more
2243 for(size_t i = 0; i < maxBufCount / 5; ++i)
2244 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002245 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002246 BufferInfo newBufInfo;
2247 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2248 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002249 TEST(res == VK_SUCCESS);
2250 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002251 bufInfo.push_back(newBufInfo);
2252 prevOffset = allocInfo.offset;
2253 }
2254
2255 // Destroy the buffers in reverse order.
2256 while(!bufInfo.empty())
2257 {
2258 const BufferInfo& currBufInfo = bufInfo.back();
2259 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2260 bufInfo.pop_back();
2261 }
2262 }
2263
Adam Sawickiee082772018-06-20 17:45:49 +02002264 // Test ring buffer.
2265 {
2266 // Allocate number of buffers that surely fit into this block.
2267 bufCreateInfo.size = bufSizeMax;
2268 for(size_t i = 0; i < maxBufCount; ++i)
2269 {
2270 BufferInfo newBufInfo;
2271 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2272 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002273 TEST(res == VK_SUCCESS);
2274 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002275 bufInfo.push_back(newBufInfo);
2276 prevOffset = allocInfo.offset;
2277 }
2278
2279 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2280 const size_t buffersPerIter = maxBufCount / 10 - 1;
2281 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2282 for(size_t iter = 0; iter < iterCount; ++iter)
2283 {
2284 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2285 {
2286 const BufferInfo& currBufInfo = bufInfo.front();
2287 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2288 bufInfo.erase(bufInfo.begin());
2289 }
2290 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2291 {
2292 BufferInfo newBufInfo;
2293 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2294 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002295 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002296 bufInfo.push_back(newBufInfo);
2297 }
2298 }
2299
2300 // Allocate buffers until we reach out-of-memory.
2301 uint32_t debugIndex = 0;
2302 while(res == VK_SUCCESS)
2303 {
2304 BufferInfo newBufInfo;
2305 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2306 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2307 if(res == VK_SUCCESS)
2308 {
2309 bufInfo.push_back(newBufInfo);
2310 }
2311 else
2312 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002313 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002314 }
2315 ++debugIndex;
2316 }
2317
2318 // Destroy the buffers in random order.
2319 while(!bufInfo.empty())
2320 {
2321 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2322 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2323 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2324 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2325 }
2326 }
2327
Adam Sawicki680b2252018-08-22 14:47:32 +02002328 // Test double stack.
2329 {
2330 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
2331 VkDeviceSize prevOffsetLower = 0;
2332 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2333 for(size_t i = 0; i < maxBufCount; ++i)
2334 {
2335 const bool upperAddress = (i % 2) != 0;
2336 if(upperAddress)
2337 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2338 else
2339 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002340 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 BufferInfo newBufInfo;
2342 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2343 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002344 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002345 if(upperAddress)
2346 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002347 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002348 prevOffsetUpper = allocInfo.offset;
2349 }
2350 else
2351 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002353 prevOffsetLower = allocInfo.offset;
2354 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002355 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002356 bufInfo.push_back(newBufInfo);
2357 }
2358
2359 // Destroy few buffers from top of the stack.
2360 for(size_t i = 0; i < maxBufCount / 5; ++i)
2361 {
2362 const BufferInfo& currBufInfo = bufInfo.back();
2363 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2364 bufInfo.pop_back();
2365 }
2366
2367 // Create some more
2368 for(size_t i = 0; i < maxBufCount / 5; ++i)
2369 {
2370 const bool upperAddress = (i % 2) != 0;
2371 if(upperAddress)
2372 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2373 else
2374 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002375 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002376 BufferInfo newBufInfo;
2377 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2378 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002379 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002380 bufInfo.push_back(newBufInfo);
2381 }
2382
2383 // Destroy the buffers in reverse order.
2384 while(!bufInfo.empty())
2385 {
2386 const BufferInfo& currBufInfo = bufInfo.back();
2387 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2388 bufInfo.pop_back();
2389 }
2390
2391 // Create buffers on both sides until we reach out of memory.
2392 prevOffsetLower = 0;
2393 prevOffsetUpper = poolCreateInfo.blockSize;
2394 res = VK_SUCCESS;
2395 for(size_t i = 0; res == VK_SUCCESS; ++i)
2396 {
2397 const bool upperAddress = (i % 2) != 0;
2398 if(upperAddress)
2399 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2400 else
2401 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002402 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002403 BufferInfo newBufInfo;
2404 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2405 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2406 if(res == VK_SUCCESS)
2407 {
2408 if(upperAddress)
2409 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002410 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002411 prevOffsetUpper = allocInfo.offset;
2412 }
2413 else
2414 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002415 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002416 prevOffsetLower = allocInfo.offset;
2417 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002418 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002419 bufInfo.push_back(newBufInfo);
2420 }
2421 }
2422
2423 // Destroy the buffers in random order.
2424 while(!bufInfo.empty())
2425 {
2426 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2427 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2428 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2429 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2430 }
2431
2432 // Create buffers on upper side only, constant size, until we reach out of memory.
2433 prevOffsetUpper = poolCreateInfo.blockSize;
2434 res = VK_SUCCESS;
2435 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2436 bufCreateInfo.size = bufSizeMax;
2437 for(size_t i = 0; res == VK_SUCCESS; ++i)
2438 {
2439 BufferInfo newBufInfo;
2440 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2441 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2442 if(res == VK_SUCCESS)
2443 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002444 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002445 prevOffsetUpper = allocInfo.offset;
2446 bufInfo.push_back(newBufInfo);
2447 }
2448 }
2449
2450 // Destroy the buffers in reverse order.
2451 while(!bufInfo.empty())
2452 {
2453 const BufferInfo& currBufInfo = bufInfo.back();
2454 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2455 bufInfo.pop_back();
2456 }
2457 }
2458
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002459 // Test ring buffer with lost allocations.
2460 {
2461 // Allocate number of buffers until pool is full.
2462 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2463 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2464 res = VK_SUCCESS;
2465 for(size_t i = 0; res == VK_SUCCESS; ++i)
2466 {
2467 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2468
Adam Sawickifd366b62019-01-24 15:26:43 +01002469 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002470
2471 BufferInfo newBufInfo;
2472 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2473 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2474 if(res == VK_SUCCESS)
2475 bufInfo.push_back(newBufInfo);
2476 }
2477
2478 // Free first half of it.
2479 {
2480 const size_t buffersToDelete = bufInfo.size() / 2;
2481 for(size_t i = 0; i < buffersToDelete; ++i)
2482 {
2483 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2484 }
2485 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2486 }
2487
2488 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002489 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002490 res = VK_SUCCESS;
2491 for(size_t i = 0; res == VK_SUCCESS; ++i)
2492 {
2493 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2494
Adam Sawickifd366b62019-01-24 15:26:43 +01002495 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002496
2497 BufferInfo newBufInfo;
2498 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2499 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2500 if(res == VK_SUCCESS)
2501 bufInfo.push_back(newBufInfo);
2502 }
2503
2504 VkDeviceSize firstNewOffset;
2505 {
2506 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2507
2508 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2509 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2510 bufCreateInfo.size = bufSizeMax;
2511
2512 BufferInfo newBufInfo;
2513 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2514 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002515 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002516 bufInfo.push_back(newBufInfo);
2517 firstNewOffset = allocInfo.offset;
2518
2519 // Make sure at least one buffer from the beginning became lost.
2520 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002521 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002522 }
2523
Adam Sawickifd366b62019-01-24 15:26:43 +01002524#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002525 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2526 size_t newCount = 1;
2527 for(;;)
2528 {
2529 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2530
Adam Sawickifd366b62019-01-24 15:26:43 +01002531 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002532
2533 BufferInfo newBufInfo;
2534 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2535 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002536
Adam Sawickib8d34d52018-10-03 17:41:20 +02002537 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002538 bufInfo.push_back(newBufInfo);
2539 ++newCount;
2540 if(allocInfo.offset < firstNewOffset)
2541 break;
2542 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002543#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002544
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002545 // Delete buffers that are lost.
2546 for(size_t i = bufInfo.size(); i--; )
2547 {
2548 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2549 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2550 {
2551 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2552 bufInfo.erase(bufInfo.begin() + i);
2553 }
2554 }
2555
2556 // Test vmaMakePoolAllocationsLost
2557 {
2558 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2559
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002560 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002561 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002562 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002563
2564 size_t realLostAllocCount = 0;
2565 for(size_t i = 0; i < bufInfo.size(); ++i)
2566 {
2567 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2568 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2569 ++realLostAllocCount;
2570 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002571 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002572 }
2573
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002574 // Destroy all the buffers in forward order.
2575 for(size_t i = 0; i < bufInfo.size(); ++i)
2576 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2577 bufInfo.clear();
2578 }
2579
Adam Sawicki70a683e2018-08-24 15:36:32 +02002580 vmaDestroyPool(g_hAllocator, pool);
2581}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002582
// Tests a custom pool using the linear allocation algorithm
// (VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) with no explicit block count limit,
// verifying that a second VkDeviceMemory block is created when the first one
// fills up, and that empty extra blocks are released again.
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer used only to choose a compatible memory type for the pool.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block.
        // Crossing into a new block is detected by a change of the returned
        // allocInfo.deviceMemory handle.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block.
        // The allocator may keep one empty block around, hence <= 1 and not == 0.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block.
        // The second block became empty after the LIFO deletions and should
        // have been freed; the new buffer fits into the first block.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
2717
Adam Sawickifd11d752018-08-22 15:02:10 +02002718static void ManuallyTestLinearAllocator()
2719{
2720 VmaStats origStats;
2721 vmaCalculateStats(g_hAllocator, &origStats);
2722
2723 wprintf(L"Manually test linear allocator\n");
2724
2725 RandomNumberGenerator rand{645332};
2726
2727 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2728 sampleBufCreateInfo.size = 1024; // Whatever.
2729 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2730
2731 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2732 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2733
2734 VmaPoolCreateInfo poolCreateInfo = {};
2735 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002737
2738 poolCreateInfo.blockSize = 10 * 1024;
2739 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2740 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2741
2742 VmaPool pool = nullptr;
2743 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002744 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002745
2746 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2747
2748 VmaAllocationCreateInfo allocCreateInfo = {};
2749 allocCreateInfo.pool = pool;
2750
2751 std::vector<BufferInfo> bufInfo;
2752 VmaAllocationInfo allocInfo;
2753 BufferInfo newBufInfo;
2754
2755 // Test double stack.
2756 {
2757 /*
2758 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2759 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2760
2761 Totally:
2762 1 block allocated
2763 10240 Vulkan bytes
2764 6 new allocations
2765 2256 bytes in allocations
2766 */
2767
2768 bufCreateInfo.size = 32;
2769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2770 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002771 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002772 bufInfo.push_back(newBufInfo);
2773
2774 bufCreateInfo.size = 1024;
2775 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2776 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002777 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002778 bufInfo.push_back(newBufInfo);
2779
2780 bufCreateInfo.size = 32;
2781 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2782 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002783 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002784 bufInfo.push_back(newBufInfo);
2785
2786 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2787
2788 bufCreateInfo.size = 128;
2789 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2790 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002791 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002792 bufInfo.push_back(newBufInfo);
2793
2794 bufCreateInfo.size = 1024;
2795 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2796 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002797 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002798 bufInfo.push_back(newBufInfo);
2799
2800 bufCreateInfo.size = 16;
2801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2802 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002803 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002804 bufInfo.push_back(newBufInfo);
2805
2806 VmaStats currStats;
2807 vmaCalculateStats(g_hAllocator, &currStats);
2808 VmaPoolStats poolStats;
2809 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2810
2811 char* statsStr = nullptr;
2812 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2813
2814 // PUT BREAKPOINT HERE TO CHECK.
2815 // Inspect: currStats versus origStats, poolStats, statsStr.
2816 int I = 0;
2817
2818 vmaFreeStatsString(g_hAllocator, statsStr);
2819
2820 // Destroy the buffers in reverse order.
2821 while(!bufInfo.empty())
2822 {
2823 const BufferInfo& currBufInfo = bufInfo.back();
2824 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2825 bufInfo.pop_back();
2826 }
2827 }
2828
2829 vmaDestroyPool(g_hAllocator, pool);
2830}
2831
// Benchmarks one combination of (pool algorithm, initial pool occupancy,
// allocation strategy, free order): repeatedly performs thousands of
// vmaAllocateMemory/vmaFreeMemory calls of random sizes in a single-block
// pool and prints/appends the accumulated allocation and free times.
// `file` may be null; if not, a CSV row is appended to it.
// `algorithm` is 0 (default) or one of the VMA_POOL_CREATE_*_ALGORITHM_BIT flags.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    // Sample buffer used only to choose a memory type and query requirements.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block so the benchmark never allocates new VkDeviceMemory.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    // Optionally pre-fragment the pool so the benchmark runs on a "used" heap.
    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reorder the list first; only the frees themselves are timed.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    // Human-readable summary to stdout.
    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    // Machine-readable CSV row, matching the header written by BenchmarkAlgorithms.
    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
2983
Adam Sawicki80927152018-09-07 17:27:23 +02002984static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002985{
Adam Sawicki80927152018-09-07 17:27:23 +02002986 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002987
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002988 if(file)
2989 {
2990 fprintf(file,
2991 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002992 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002993 "Allocation time (s),Deallocation time (s)\n");
2994 }
2995
Adam Sawicki0a607132018-08-24 11:18:41 +02002996 uint32_t freeOrderCount = 1;
2997 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2998 freeOrderCount = 3;
2999 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3000 freeOrderCount = 2;
3001
3002 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003003 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003004
3005 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3006 {
3007 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3008 switch(freeOrderIndex)
3009 {
3010 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3011 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3012 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3013 default: assert(0);
3014 }
3015
3016 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3017 {
Adam Sawicki80927152018-09-07 17:27:23 +02003018 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003019 {
Adam Sawicki80927152018-09-07 17:27:23 +02003020 uint32_t algorithm = 0;
3021 switch(algorithmIndex)
3022 {
3023 case 0:
3024 break;
3025 case 1:
3026 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3027 break;
3028 case 2:
3029 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3030 break;
3031 default:
3032 assert(0);
3033 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003034
Adam Sawicki80927152018-09-07 17:27:23 +02003035 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003036 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3037 {
3038 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003039 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003040 {
3041 switch(allocStrategyIndex)
3042 {
3043 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3044 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3045 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3046 default: assert(0);
3047 }
3048 }
3049
Adam Sawicki80927152018-09-07 17:27:23 +02003050 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003051 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003052 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003053 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003054 strategy,
3055 freeOrder); // freeOrder
3056 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003057 }
3058 }
3059 }
3060}
3061
Adam Sawickib8333fb2018-03-13 16:15:53 +01003062static void TestPool_SameSize()
3063{
3064 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3065 const size_t BUF_COUNT = 100;
3066 VkResult res;
3067
3068 RandomNumberGenerator rand{123};
3069
3070 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3071 bufferInfo.size = BUF_SIZE;
3072 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3073
3074 uint32_t memoryTypeBits = UINT32_MAX;
3075 {
3076 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003077 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003078 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003079
3080 VkMemoryRequirements memReq;
3081 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3082 memoryTypeBits = memReq.memoryTypeBits;
3083
Adam Sawicki1f84f622019-07-02 13:40:01 +02003084 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003085 }
3086
3087 VmaAllocationCreateInfo poolAllocInfo = {};
3088 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3089 uint32_t memTypeIndex;
3090 res = vmaFindMemoryTypeIndex(
3091 g_hAllocator,
3092 memoryTypeBits,
3093 &poolAllocInfo,
3094 &memTypeIndex);
3095
3096 VmaPoolCreateInfo poolCreateInfo = {};
3097 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3098 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3099 poolCreateInfo.minBlockCount = 1;
3100 poolCreateInfo.maxBlockCount = 4;
3101 poolCreateInfo.frameInUseCount = 0;
3102
3103 VmaPool pool;
3104 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003105 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003106
Adam Sawickia020fb82019-11-02 14:43:06 +01003107 // Test pool name
3108 {
3109 static const char* const POOL_NAME = "Pool name";
3110 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3111
3112 const char* fetchedPoolName = nullptr;
3113 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3114 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3115
3116 SaveAllocatorStatsToFile(L"TEST.json");//DELME
3117
3118 vmaSetPoolName(g_hAllocator, pool, nullptr);
3119 }
3120
Adam Sawickib8333fb2018-03-13 16:15:53 +01003121 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3122
3123 VmaAllocationCreateInfo allocInfo = {};
3124 allocInfo.pool = pool;
3125 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3126 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3127
3128 struct BufItem
3129 {
3130 VkBuffer Buf;
3131 VmaAllocation Alloc;
3132 };
3133 std::vector<BufItem> items;
3134
3135 // Fill entire pool.
3136 for(size_t i = 0; i < BUF_COUNT; ++i)
3137 {
3138 BufItem item;
3139 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003141 items.push_back(item);
3142 }
3143
3144 // Make sure that another allocation would fail.
3145 {
3146 BufItem item;
3147 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003148 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003149 }
3150
3151 // Validate that no buffer is lost. Also check that they are not mapped.
3152 for(size_t i = 0; i < items.size(); ++i)
3153 {
3154 VmaAllocationInfo allocInfo;
3155 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003156 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3157 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003158 }
3159
3160 // Free some percent of random items.
3161 {
3162 const size_t PERCENT_TO_FREE = 10;
3163 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3164 for(size_t i = 0; i < itemsToFree; ++i)
3165 {
3166 size_t index = (size_t)rand.Generate() % items.size();
3167 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3168 items.erase(items.begin() + index);
3169 }
3170 }
3171
3172 // Randomly allocate and free items.
3173 {
3174 const size_t OPERATION_COUNT = BUF_COUNT;
3175 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3176 {
3177 bool allocate = rand.Generate() % 2 != 0;
3178 if(allocate)
3179 {
3180 if(items.size() < BUF_COUNT)
3181 {
3182 BufItem item;
3183 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003184 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003185 items.push_back(item);
3186 }
3187 }
3188 else // Free
3189 {
3190 if(!items.empty())
3191 {
3192 size_t index = (size_t)rand.Generate() % items.size();
3193 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3194 items.erase(items.begin() + index);
3195 }
3196 }
3197 }
3198 }
3199
3200 // Allocate up to maximum.
3201 while(items.size() < BUF_COUNT)
3202 {
3203 BufItem item;
3204 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003205 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003206 items.push_back(item);
3207 }
3208
3209 // Validate that no buffer is lost.
3210 for(size_t i = 0; i < items.size(); ++i)
3211 {
3212 VmaAllocationInfo allocInfo;
3213 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003214 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003215 }
3216
3217 // Next frame.
3218 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3219
3220 // Allocate another BUF_COUNT buffers.
3221 for(size_t i = 0; i < BUF_COUNT; ++i)
3222 {
3223 BufItem item;
3224 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003225 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003226 items.push_back(item);
3227 }
3228
3229 // Make sure the first BUF_COUNT is lost. Delete them.
3230 for(size_t i = 0; i < BUF_COUNT; ++i)
3231 {
3232 VmaAllocationInfo allocInfo;
3233 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003234 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003235 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3236 }
3237 items.erase(items.begin(), items.begin() + BUF_COUNT);
3238
3239 // Validate that no buffer is lost.
3240 for(size_t i = 0; i < items.size(); ++i)
3241 {
3242 VmaAllocationInfo allocInfo;
3243 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003244 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003245 }
3246
3247 // Free one item.
3248 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3249 items.pop_back();
3250
3251 // Validate statistics.
3252 {
3253 VmaPoolStats poolStats = {};
3254 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003255 TEST(poolStats.allocationCount == items.size());
3256 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
3257 TEST(poolStats.unusedRangeCount == 1);
3258 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3259 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003260 }
3261
3262 // Free all remaining items.
3263 for(size_t i = items.size(); i--; )
3264 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3265 items.clear();
3266
3267 // Allocate maximum items again.
3268 for(size_t i = 0; i < BUF_COUNT; ++i)
3269 {
3270 BufItem item;
3271 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003272 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003273 items.push_back(item);
3274 }
3275
3276 // Delete every other item.
3277 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3278 {
3279 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3280 items.erase(items.begin() + i);
3281 }
3282
3283 // Defragment!
3284 {
3285 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3286 for(size_t i = 0; i < items.size(); ++i)
3287 allocationsToDefragment[i] = items[i].Alloc;
3288
3289 VmaDefragmentationStats defragmentationStats;
3290 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003291 TEST(res == VK_SUCCESS);
3292 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003293 }
3294
3295 // Free all remaining items.
3296 for(size_t i = items.size(); i--; )
3297 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3298 items.clear();
3299
3300 ////////////////////////////////////////////////////////////////////////////////
3301 // Test for vmaMakePoolAllocationsLost
3302
3303 // Allocate 4 buffers on frame 10.
3304 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3305 for(size_t i = 0; i < 4; ++i)
3306 {
3307 BufItem item;
3308 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003309 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003310 items.push_back(item);
3311 }
3312
3313 // Touch first 2 of them on frame 11.
3314 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3315 for(size_t i = 0; i < 2; ++i)
3316 {
3317 VmaAllocationInfo allocInfo;
3318 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3319 }
3320
3321 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3322 size_t lostCount = 0xDEADC0DE;
3323 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003324 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003325
3326 // Make another call. Now 0 should be lost.
3327 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003328 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003329
3330 // Make another call, with null count. Should not crash.
3331 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3332
3333 // END: Free all remaining items.
3334 for(size_t i = items.size(); i--; )
3335 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3336
3337 items.clear();
3338
Adam Sawickid2924172018-06-11 12:48:46 +02003339 ////////////////////////////////////////////////////////////////////////////////
3340 // Test for allocation too large for pool
3341
3342 {
3343 VmaAllocationCreateInfo allocCreateInfo = {};
3344 allocCreateInfo.pool = pool;
3345
3346 VkMemoryRequirements memReq;
3347 memReq.memoryTypeBits = UINT32_MAX;
3348 memReq.alignment = 1;
3349 memReq.size = poolCreateInfo.blockSize + 4;
3350
3351 VmaAllocation alloc = nullptr;
3352 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003353 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003354 }
3355
Adam Sawickib8333fb2018-03-13 16:15:53 +01003356 vmaDestroyPool(g_hAllocator, pool);
3357}
3358
// Returns true iff every byte in [pMemory, pMemory + size) equals pattern.
// An empty range (size == 0) trivially validates.
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* p = (const uint8_t*)pMemory;
    const uint8_t* const pEnd = p + size;
    while(p != pEnd)
    {
        if(*p != pattern)
            return false;
        ++p;
    }
    return true;
}
3371
// Verifies that allocation memory is filled with the 0xDC byte pattern right
// after creation and with 0xEF right after the allocation is freed.
// NOTE(review): presumably requires the library to be compiled with debug
// allocation initialization enabled (VMA_DEBUG_INITIALIZE_ALLOCATIONS) —
// confirm against the build configuration, otherwise these checks would fail.
static void TestAllocationsInitialization()
{
    VkResult res;

    const size_t BUF_SIZE = 1024;

    // Create pool.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = BUF_SIZE;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // Dummy create info used only to pick a CPU-visible memory type for the pool.
    VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
    dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Single fixed block so all test buffers share one mappable memory block.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BUF_SIZE * 10;
    poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
    poolCreateInfo.maxBlockCount = 1;
    res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // The pool handle is stored directly in the create info reused for every
    // buffer below.
    VmaAllocationCreateInfo bufAllocCreateInfo = {};
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
    TEST(res == VK_SUCCESS);

    // Create one persistently mapped buffer to keep memory of this block mapped,
    // so that pointer to mapped data will remain (more or less...) valid even
    // after destruction of other allocations.

    bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    VkBuffer firstBuf;
    VmaAllocation firstAlloc;
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
    TEST(res == VK_SUCCESS);

    // Test buffers.

    // Iteration 0 uses a persistently mapped buffer, iteration 1 maps manually.
    for(uint32_t i = 0; i < 2; ++i)
    {
        const bool persistentlyMapped = i == 0;
        bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
        VkBuffer buf;
        VmaAllocation alloc;
        VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        void* pMappedData;
        if(!persistentlyMapped)
        {
            res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
            TEST(res == VK_SUCCESS);
        }
        else
        {
            pMappedData = allocInfo.pMappedData;
        }

        // Validate initialized content
        bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
        TEST(valid);

        if(!persistentlyMapped)
        {
            vmaUnmapMemory(g_hAllocator, alloc);
        }

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Validate freed content
        // Reading pMappedData after the destroy is only safe because firstBuf
        // keeps the whole block mapped (see comment above).
        valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
        TEST(valid);
    }

    vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
    vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
}
3450
// Multithreaded benchmark of allocations in a single custom VmaPool.
// Spawns config.ThreadCount worker threads, drives them through
// config.FrameCount "frames" using Win32 auto-reset events (one start and one
// end event per thread), and aggregates per-thread timing/failure counters
// into outResult. Items can be lost between frames (CAN_BECOME_LOST) and are
// then destroyed and re-allocated.
static void TestPool_Benchmark(
    PoolTestResult& outResult,
    const PoolTestConfig& config)
{
    TEST(config.ThreadCount > 0);

    RandomNumberGenerator mainRand{config.RandSeed};

    // Sum of all probability weights; used below to pick a size class by
    // weighted random choice.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 256; // Whatever.
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = 256; // Whatever.
    imageInfo.extent.height = 256; // Whatever.
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    // Query memory type bits for buffers via a throwaway buffer.
    uint32_t bufferMemoryTypeBits = UINT32_MAX;
    {
        VkBuffer dummyBuffer;
        VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
        bufferMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
    }

    // Query memory type bits for images via a throwaway image.
    uint32_t imageMemoryTypeBits = UINT32_MAX;
    {
        VkImage dummyImage;
        VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
        imageMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
    }

    // A single pool must satisfy both resource kinds if the config uses both;
    // bail out (with a warning, not a failure) if no common memory type exists.
    uint32_t memoryTypeBits = 0;
    if(config.UsesBuffers() && config.UsesImages())
    {
        memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
        if(memoryTypeBits == 0)
        {
            PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
            return;
        }
    }
    else if(config.UsesBuffers())
        memoryTypeBits = bufferMemoryTypeBits;
    else if(config.UsesImages())
        memoryTypeBits = imageMemoryTypeBits;
    else
        TEST(0);

    // One fixed-size block; frameInUseCount = 1 enables the lost-allocation
    // mechanism exercised by the worker threads.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = 0;
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 1;
    poolCreateInfo.blockSize = config.PoolSize;
    poolCreateInfo.frameInUseCount = 1;

    VmaAllocationCreateInfo dummyAllocCreateInfo = {};
    dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);

    VmaPool pool;
    VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Start time measurement - after creating pool and initializing data structures.
    time_point timeBeg = std::chrono::high_resolution_clock::now();

    ////////////////////////////////////////////////////////////////////////////////
    // ThreadProc
    // Body of each worker thread. Waits on frameStartEvent each frame,
    // shuffles items between used/unused sets, (re)allocates and touches them,
    // then signals frameEndEvent back to the main thread.
    auto ThreadProc = [&](
        PoolTestThreadResult* outThreadResult,
        uint32_t randSeed,
        HANDLE frameStartEvent,
        HANDLE frameEndEvent) -> void
    {
        RandomNumberGenerator threadRand{randSeed};

        // Initialize min/max with opposite extremes so the first sample wins.
        outThreadResult->AllocationTimeMin = duration::max();
        outThreadResult->AllocationTimeSum = duration::zero();
        outThreadResult->AllocationTimeMax = duration::min();
        outThreadResult->DeallocationTimeMin = duration::max();
        outThreadResult->DeallocationTimeSum = duration::zero();
        outThreadResult->DeallocationTimeMax = duration::min();
        outThreadResult->AllocationCount = 0;
        outThreadResult->DeallocationCount = 0;
        outThreadResult->LostAllocationCount = 0;
        outThreadResult->LostAllocationTotalSize = 0;
        outThreadResult->FailedAllocationCount = 0;
        outThreadResult->FailedAllocationTotalSize = 0;

        // One benchmark item: either a buffer (BufferSize != 0) or an image.
        struct Item
        {
            VkDeviceSize BufferSize;
            VkExtent2D ImageSize;
            VkBuffer Buf;
            VkImage Image;
            VmaAllocation Alloc;

            VkDeviceSize CalcSizeBytes() const
            {
                return BufferSize +
                    ImageSize.width * ImageSize.height * 4;
            }
        };
        std::vector<Item> unusedItems, usedItems;

        const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;

        // Create all items - all unused, not yet allocated.
        for(size_t i = 0; i < threadTotalItemCount; ++i)
        {
            Item item = {};

            // Weighted random choice of a size class from config.AllocationSizes.
            uint32_t allocSizeIndex = 0;
            uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
            while(r >= config.AllocationSizes[allocSizeIndex].Probability)
                r -= config.AllocationSizes[allocSizeIndex++].Probability;

            // A size class is either buffer-only or image-only, never both.
            const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
            if(allocSize.BufferSizeMax > 0)
            {
                TEST(allocSize.BufferSizeMin > 0);
                TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
                if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                    item.BufferSize = allocSize.BufferSizeMin;
                else
                {
                    // Round buffer size down to a multiple of 16.
                    item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                    item.BufferSize = item.BufferSize / 16 * 16;
                }
            }
            else
            {
                TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
                if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                    item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
                else
                {
                    item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                    item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                }
            }

            unusedItems.push_back(item);
        }

        // Creates the Vulkan buffer/image for an item in the shared pool,
        // timing the call via the RAII register object.
        auto Allocate = [&](Item& item) -> VkResult
        {
            VmaAllocationCreateInfo allocCreateInfo = {};
            allocCreateInfo.pool = pool;
            allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
                VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

            if(item.BufferSize)
            {
                bufferInfo.size = item.BufferSize;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
            }
            else
            {
                TEST(item.ImageSize.width && item.ImageSize.height);

                imageInfo.extent.width = item.ImageSize.width;
                imageInfo.extent.height = item.ImageSize.height;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
            }
        };

        ////////////////////////////////////////////////////////////////////////////////
        // Frames
        for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
        {
            // Block until the main thread signals the start of this frame.
            WaitForSingleObject(frameStartEvent, INFINITE);

            // Always make some percent of used bufs unused, to choose different used ones.
            const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
            for(size_t i = 0; i < bufsToMakeUnused; ++i)
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }

            // Determine which bufs we want to use in this frame.
            const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
                / config.ThreadCount;
            TEST(usedBufCount < usedItems.size() + unusedItems.size());
            // Move some used to unused.
            while(usedBufCount < usedItems.size())
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }
            // Move some unused to used.
            while(usedBufCount > usedItems.size())
            {
                size_t index = threadRand.Generate() % unusedItems.size();
                usedItems.push_back(unusedItems[index]);
                unusedItems.erase(unusedItems.begin() + index);
            }

            uint32_t touchExistingCount = 0;
            uint32_t touchLostCount = 0;
            uint32_t createSucceededCount = 0;
            uint32_t createFailedCount = 0;

            // Touch all used bufs. If not created or lost, allocate.
            for(size_t i = 0; i < usedItems.size(); ++i)
            {
                Item& item = usedItems[i];
                // Not yet created.
                if(item.Alloc == VK_NULL_HANDLE)
                {
                    res = Allocate(item);
                    ++outThreadResult->AllocationCount;
                    if(res != VK_SUCCESS)
                    {
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;
                        ++outThreadResult->FailedAllocationCount;
                        outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                        ++createFailedCount;
                    }
                    else
                        ++createSucceededCount;
                }
                else
                {
                    // Touch.
                    // vmaGetAllocationInfo marks the allocation as used in the
                    // current frame; a null deviceMemory means it was lost.
                    VmaAllocationInfo allocInfo;
                    vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
                    // Lost.
                    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
                    {
                        ++touchLostCount;

                        // Destroy.
                        {
                            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                            if(item.Buf)
                                vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
                            ++outThreadResult->DeallocationCount;
                        }
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;

                        ++outThreadResult->LostAllocationCount;
                        outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();

                        // Recreate.
                        res = Allocate(item);
                        ++outThreadResult->AllocationCount;
                        // Creation failed.
                        if(res != VK_SUCCESS)
                        {
                            ++outThreadResult->FailedAllocationCount;
                            outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                            ++createFailedCount;
                        }
                        else
                            ++createSucceededCount;
                    }
                    else
                        ++touchExistingCount;
                }
            }

            /*
            printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
                randSeed, frameIndex,
                touchExistingCount, touchLostCount,
                createSucceededCount, createFailedCount);
            */

            // Tell the main thread this worker finished the frame.
            SetEvent(frameEndEvent);
        }

        // Free all remaining items.
        for(size_t i = usedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
            if(usedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
        for(size_t i = unusedItems.size(); i--; )
        {
            // (Variable name typo "timeRegisterOb" is harmless - the RAII
            // object still times this deallocation.)
            PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
            if(unusedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
    };

    // Launch threads.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<HANDLE> frameStartEvents{config.ThreadCount};
    std::vector<HANDLE> frameEndEvents{config.ThreadCount};
    std::vector<std::thread> bkgThreads;
    std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
    for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        // Auto-reset, initially non-signaled events for frame synchronization.
        frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        bkgThreads.emplace_back(std::bind(
            ThreadProc,
            &threadResults[threadIndex],
            threadRandSeed + threadIndex,
            frameStartEvents[threadIndex],
            frameEndEvents[threadIndex]));
    }

    // Execute frames.
    TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
    for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
    {
        vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
        // Release all workers for this frame, then wait for all of them.
        for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
            SetEvent(frameStartEvents[threadIndex]);
        WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
    }

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
    {
        bkgThreads[i].join();
        CloseHandle(frameEndEvents[i]);
        CloseHandle(frameStartEvents[i]);
    }
    bkgThreads.clear();

    // Finish time measurement - before destroying pool.
    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    vmaDestroyPool(g_hAllocator, pool);

    // Aggregate per-thread results: min/max across threads, Avg fields first
    // accumulate sums and are divided by the total counts at the end.
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.LostAllocationCount = 0;
    outResult.LostAllocationTotalSize = 0;
    outResult.FailedAllocationCount = 0;
    outResult.FailedAllocationTotalSize = 0;
    size_t allocationCount = 0;
    size_t deallocationCount = 0;
    for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        const PoolTestThreadResult& threadResult = threadResults[threadIndex];
        outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
        outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
        outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
        outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
        outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
        outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
        allocationCount += threadResult.AllocationCount;
        deallocationCount += threadResult.DeallocationCount;
        outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
        outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
        outResult.LostAllocationCount += threadResult.LostAllocationCount;
        outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
    }
    if(allocationCount)
        outResult.AllocationTimeAvg /= allocationCount;
    if(deallocationCount)
        outResult.DeallocationTimeAvg /= deallocationCount;
}
3856
// Returns true when the half-open byte ranges [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) overlap. Identical base pointers always count as
// overlapping, even for zero-length regions (matches original tie behavior).
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Normalize so lo is the lower-addressed region, then the regions overlap
    // exactly when lo's end extends past hi's start.
    const bool firstIsLower = ptr1 < ptr2;
    char* const lo = firstIsLower ? ptr1 : ptr2;
    const size_t loSize = firstIsLower ? size1 : size2;
    char* const hi = firstIsLower ? ptr2 : ptr1;
    return lo + loSize > hi;
}
3866
// Exercises vmaMapMemory/vmaUnmapMemory reference counting and persistently
// mapped allocations in three variants: default pools (TEST_NORMAL), a custom
// pool (TEST_POOL), and dedicated allocations (TEST_DEDICATED).
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Filled during the TEST_NORMAL iteration and reused by TEST_POOL below -
    // relies on TEST_NORMAL running first in the enum order.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers.
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so must not be pre-mapped.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Write last and first byte to prove the mapping is usable.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time.
        // Nested map must return the same pointer (reference-counted mapping).
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        // Distinct allocations must map to non-overlapping memory ranges.
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times.
        // Two maps require two unmaps before pMappedData reads as null again.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2.
        // Explicit map on a persistently mapped allocation returns the same pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2.
        // The persistent mapping from MAPPED_BIT must survive the explicit unmap.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
3971
Adam Sawickidaa6a552019-06-25 15:26:37 +02003972// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
3973static void TestDeviceLocalMapped()
3974{
3975 VkResult res;
3976
3977 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
3978 {
3979 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3980 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3981 bufCreateInfo.size = 4096;
3982
3983 VmaPool pool = VK_NULL_HANDLE;
3984 VmaAllocationCreateInfo allocCreateInfo = {};
3985 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3986 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3987 if(testIndex == 2)
3988 {
3989 VmaPoolCreateInfo poolCreateInfo = {};
3990 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3991 TEST(res == VK_SUCCESS);
3992 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
3993 TEST(res == VK_SUCCESS);
3994 allocCreateInfo.pool = pool;
3995 }
3996 else if(testIndex == 1)
3997 {
3998 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3999 }
4000
4001 VkBuffer buf = VK_NULL_HANDLE;
4002 VmaAllocation alloc = VK_NULL_HANDLE;
4003 VmaAllocationInfo allocInfo = {};
4004 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4005 TEST(res == VK_SUCCESS && alloc);
4006
4007 VkMemoryPropertyFlags memTypeFlags = 0;
4008 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4009 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4010 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
4011
4012 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4013 vmaDestroyPool(g_hAllocator, pool);
4014 }
4015}
4016
Adam Sawickib8333fb2018-03-13 16:15:53 +01004017static void TestMappingMultithreaded()
4018{
4019 wprintf(L"Testing mapping multithreaded...\n");
4020
4021 static const uint32_t threadCount = 16;
4022 static const uint32_t bufferCount = 1024;
4023 static const uint32_t threadBufferCount = bufferCount / threadCount;
4024
4025 VkResult res;
4026 volatile uint32_t memTypeIndex = UINT32_MAX;
4027
4028 enum TEST
4029 {
4030 TEST_NORMAL,
4031 TEST_POOL,
4032 TEST_DEDICATED,
4033 TEST_COUNT
4034 };
4035 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4036 {
4037 VmaPool pool = nullptr;
4038 if(testIndex == TEST_POOL)
4039 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004040 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004041 VmaPoolCreateInfo poolInfo = {};
4042 poolInfo.memoryTypeIndex = memTypeIndex;
4043 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004044 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004045 }
4046
4047 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4048 bufCreateInfo.size = 0x10000;
4049 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4050
4051 VmaAllocationCreateInfo allocCreateInfo = {};
4052 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4053 allocCreateInfo.pool = pool;
4054 if(testIndex == TEST_DEDICATED)
4055 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4056
4057 std::thread threads[threadCount];
4058 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4059 {
4060 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4061 // ======== THREAD FUNCTION ========
4062
4063 RandomNumberGenerator rand{threadIndex};
4064
4065 enum class MODE
4066 {
4067 // Don't map this buffer at all.
4068 DONT_MAP,
4069 // Map and quickly unmap.
4070 MAP_FOR_MOMENT,
4071 // Map and unmap before destruction.
4072 MAP_FOR_LONGER,
4073 // Map two times. Quickly unmap, second unmap before destruction.
4074 MAP_TWO_TIMES,
4075 // Create this buffer as persistently mapped.
4076 PERSISTENTLY_MAPPED,
4077 COUNT
4078 };
4079 std::vector<BufferInfo> bufInfos{threadBufferCount};
4080 std::vector<MODE> bufModes{threadBufferCount};
4081
4082 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4083 {
4084 BufferInfo& bufInfo = bufInfos[bufferIndex];
4085 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4086 bufModes[bufferIndex] = mode;
4087
4088 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4089 if(mode == MODE::PERSISTENTLY_MAPPED)
4090 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4091
4092 VmaAllocationInfo allocInfo;
4093 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4094 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004095 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004096
4097 if(memTypeIndex == UINT32_MAX)
4098 memTypeIndex = allocInfo.memoryType;
4099
4100 char* data = nullptr;
4101
4102 if(mode == MODE::PERSISTENTLY_MAPPED)
4103 {
4104 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004105 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004106 }
4107 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4108 mode == MODE::MAP_TWO_TIMES)
4109 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004110 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004111 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004112 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004113
4114 if(mode == MODE::MAP_TWO_TIMES)
4115 {
4116 char* data2 = nullptr;
4117 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004118 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004119 }
4120 }
4121 else if(mode == MODE::DONT_MAP)
4122 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004123 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004124 }
4125 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004126 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004127
4128 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4129 if(data)
4130 data[0xFFFF] = data[0];
4131
4132 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4133 {
4134 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4135
4136 VmaAllocationInfo allocInfo;
4137 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4138 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004139 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004140 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004141 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004142 }
4143
4144 switch(rand.Generate() % 3)
4145 {
4146 case 0: Sleep(0); break; // Yield.
4147 case 1: Sleep(10); break; // 10 ms
4148 // default: No sleep.
4149 }
4150
4151 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4152 if(data)
4153 data[0xFFFF] = data[0];
4154 }
4155
4156 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4157 {
4158 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4159 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4160 {
4161 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4162
4163 VmaAllocationInfo allocInfo;
4164 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004165 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004166 }
4167
4168 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4169 }
4170 });
4171 }
4172
4173 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4174 threads[threadIndex].join();
4175
4176 vmaDestroyPool(g_hAllocator, pool);
4177 }
4178}
4179
// Writes the CSV column header matching rows produced by WriteMainTestResult().
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    fprintf(file, "%s", HEADER);
}
4196
4197static void WriteMainTestResult(
4198 FILE* file,
4199 const char* codeDescription,
4200 const char* testDescription,
4201 const Config& config, const Result& result)
4202{
4203 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4204 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4205 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4206 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4207 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4208 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4209 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4210
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004211 std::string currTime;
4212 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004213
4214 fprintf(file,
4215 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004216 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4217 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004218 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004219 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004220 totalTimeSeconds * 1e6f,
4221 allocationTimeMinSeconds * 1e6f,
4222 allocationTimeAvgSeconds * 1e6f,
4223 allocationTimeMaxSeconds * 1e6f,
4224 deallocationTimeMinSeconds * 1e6f,
4225 deallocationTimeAvgSeconds * 1e6f,
4226 deallocationTimeMaxSeconds * 1e6f,
4227 result.TotalMemoryAllocated,
4228 result.FreeRangeSizeAvg,
4229 result.FreeRangeSizeMax);
4230}
4231
// Writes the CSV column header matching rows produced by WritePoolTestResult().
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fprintf(file, "%s", HEADER);
}
4249
4250static void WritePoolTestResult(
4251 FILE* file,
4252 const char* codeDescription,
4253 const char* testDescription,
4254 const PoolTestConfig& config,
4255 const PoolTestResult& result)
4256{
4257 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4258 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4259 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4260 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4261 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4262 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4263 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4264
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004265 std::string currTime;
4266 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004267
4268 fprintf(file,
4269 "%s,%s,%s,"
4270 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4271 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4272 // General
4273 codeDescription,
4274 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004275 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004276 // Config
4277 config.ThreadCount,
4278 (unsigned long long)config.PoolSize,
4279 config.FrameCount,
4280 config.TotalItemCount,
4281 config.UsedItemCountMin,
4282 config.UsedItemCountMax,
4283 config.ItemsToMakeUnusedPercent,
4284 // Results
4285 totalTimeSeconds * 1e6f,
4286 allocationTimeMinSeconds * 1e6f,
4287 allocationTimeAvgSeconds * 1e6f,
4288 allocationTimeMaxSeconds * 1e6f,
4289 deallocationTimeMinSeconds * 1e6f,
4290 deallocationTimeAvgSeconds * 1e6f,
4291 deallocationTimeMaxSeconds * 1e6f,
4292 result.LostAllocationCount,
4293 result.LostAllocationTotalSize,
4294 result.FailedAllocationCount,
4295 result.FailedAllocationTotalSize);
4296}
4297
4298static void PerformCustomMainTest(FILE* file)
4299{
4300 Config config{};
4301 config.RandSeed = 65735476;
4302 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4303 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4304 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4305 config.FreeOrder = FREE_ORDER::FORWARD;
4306 config.ThreadCount = 16;
4307 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004308 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004309
4310 // Buffers
4311 //config.AllocationSizes.push_back({4, 16, 1024});
4312 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4313
4314 // Images
4315 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4316 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4317
4318 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4319 config.AdditionalOperationCount = 1024;
4320
4321 Result result{};
4322 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004323 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004324 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4325}
4326
4327static void PerformCustomPoolTest(FILE* file)
4328{
4329 PoolTestConfig config;
4330 config.PoolSize = 100 * 1024 * 1024;
4331 config.RandSeed = 2345764;
4332 config.ThreadCount = 1;
4333 config.FrameCount = 200;
4334 config.ItemsToMakeUnusedPercent = 2;
4335
4336 AllocationSize allocSize = {};
4337 allocSize.BufferSizeMin = 1024;
4338 allocSize.BufferSizeMax = 1024 * 1024;
4339 allocSize.Probability = 1;
4340 config.AllocationSizes.push_back(allocSize);
4341
4342 allocSize.BufferSizeMin = 0;
4343 allocSize.BufferSizeMax = 0;
4344 allocSize.ImageSizeMin = 128;
4345 allocSize.ImageSizeMax = 1024;
4346 allocSize.Probability = 1;
4347 config.AllocationSizes.push_back(allocSize);
4348
4349 config.PoolSize = config.CalcAvgResourceSize() * 200;
4350 config.UsedItemCountMax = 160;
4351 config.TotalItemCount = config.UsedItemCountMax * 10;
4352 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4353
4354 g_MemoryAliasingWarningEnabled = false;
4355 PoolTestResult result = {};
4356 TestPool_Benchmark(result, config);
4357 g_MemoryAliasingWarningEnabled = true;
4358
4359 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4360}
4361
// Runs the full matrix of MainTest benchmark configurations - thread counts x
// buffers/images x small/large sizes x varying/constant sizes x initial
// allocation percentage x allocation strategy - and writes one CSV row per
// run via WriteMainTestResult(). The size of each matrix dimension scales
// with the global ConfigType. `file` may be null, in which case results are
// printed but not recorded.
static void PerformMainTests(FILE* file)
{
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    // Number of thread-count variants exercised below (see threadCountIndex switch).
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    // Strategies iterated in the innermost loop: BestFit, WorstFit, FirstFit.
    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        // desc1..desc6 accumulate the comma-separated test description,
        // one matrix dimension per level.
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            // Run the fully-specified configuration (repeatCount > 1
                            // only for CONFIG_TYPE_MAXIMUM).
                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
4626
4627static void PerformPoolTests(FILE* file)
4628{
4629 const size_t AVG_RESOURCES_PER_POOL = 300;
4630
4631 uint32_t repeatCount = 1;
4632 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4633
4634 PoolTestConfig config{};
4635 config.RandSeed = 2346343;
4636 config.FrameCount = 200;
4637 config.ItemsToMakeUnusedPercent = 2;
4638
4639 size_t threadCountCount = 1;
4640 switch(ConfigType)
4641 {
4642 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4643 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4644 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4645 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4646 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4647 default: assert(0);
4648 }
4649 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4650 {
4651 std::string desc1;
4652
4653 switch(threadCountIndex)
4654 {
4655 case 0:
4656 desc1 += "1_thread";
4657 config.ThreadCount = 1;
4658 break;
4659 case 1:
4660 desc1 += "16_threads";
4661 config.ThreadCount = 16;
4662 break;
4663 case 2:
4664 desc1 += "2_threads";
4665 config.ThreadCount = 2;
4666 break;
4667 default:
4668 assert(0);
4669 }
4670
4671 // 0 = buffers, 1 = images, 2 = buffers and images
4672 size_t buffersVsImagesCount = 2;
4673 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4674 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4675 {
4676 std::string desc2 = desc1;
4677 switch(buffersVsImagesIndex)
4678 {
4679 case 0: desc2 += " Buffers"; break;
4680 case 1: desc2 += " Images"; break;
4681 case 2: desc2 += " Buffers+Images"; break;
4682 default: assert(0);
4683 }
4684
4685 // 0 = small, 1 = large, 2 = small and large
4686 size_t smallVsLargeCount = 2;
4687 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4688 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4689 {
4690 std::string desc3 = desc2;
4691 switch(smallVsLargeIndex)
4692 {
4693 case 0: desc3 += " Small"; break;
4694 case 1: desc3 += " Large"; break;
4695 case 2: desc3 += " Small+Large"; break;
4696 default: assert(0);
4697 }
4698
4699 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4700 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4701 else
4702 config.PoolSize = 4ull * 1024 * 1024;
4703
4704 // 0 = varying sizes min...max, 1 = set of constant sizes
4705 size_t constantSizesCount = 1;
4706 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4707 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4708 {
4709 std::string desc4 = desc3;
4710 switch(constantSizesIndex)
4711 {
4712 case 0: desc4 += " Varying_sizes"; break;
4713 case 1: desc4 += " Constant_sizes"; break;
4714 default: assert(0);
4715 }
4716
4717 config.AllocationSizes.clear();
4718 // Buffers present
4719 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4720 {
4721 // Small
4722 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4723 {
4724 // Varying size
4725 if(constantSizesIndex == 0)
4726 config.AllocationSizes.push_back({4, 16, 1024});
4727 // Constant sizes
4728 else
4729 {
4730 config.AllocationSizes.push_back({1, 16, 16});
4731 config.AllocationSizes.push_back({1, 64, 64});
4732 config.AllocationSizes.push_back({1, 256, 256});
4733 config.AllocationSizes.push_back({1, 1024, 1024});
4734 }
4735 }
4736 // Large
4737 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4738 {
4739 // Varying size
4740 if(constantSizesIndex == 0)
4741 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4742 // Constant sizes
4743 else
4744 {
4745 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4746 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4747 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4748 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4749 }
4750 }
4751 }
4752 // Images present
4753 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4754 {
4755 // Small
4756 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4757 {
4758 // Varying size
4759 if(constantSizesIndex == 0)
4760 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4761 // Constant sizes
4762 else
4763 {
4764 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4765 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4766 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4767 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4768 }
4769 }
4770 // Large
4771 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4772 {
4773 // Varying size
4774 if(constantSizesIndex == 0)
4775 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4776 // Constant sizes
4777 else
4778 {
4779 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4780 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4781 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4782 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4783 }
4784 }
4785 }
4786
4787 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4788 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4789
4790 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4791 size_t subscriptionModeCount;
4792 switch(ConfigType)
4793 {
4794 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4795 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4796 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4797 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4798 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4799 default: assert(0);
4800 }
4801 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4802 {
4803 std::string desc5 = desc4;
4804
4805 switch(subscriptionModeIndex)
4806 {
4807 case 0:
4808 desc5 += " Subscription_66%";
4809 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4810 break;
4811 case 1:
4812 desc5 += " Subscription_133%";
4813 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4814 break;
4815 case 2:
4816 desc5 += " Subscription_100%";
4817 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4818 break;
4819 case 3:
4820 desc5 += " Subscription_33%";
4821 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4822 break;
4823 case 4:
4824 desc5 += " Subscription_166%";
4825 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4826 break;
4827 default:
4828 assert(0);
4829 }
4830
4831 config.TotalItemCount = config.UsedItemCountMax * 5;
4832 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4833
4834 const char* testDescription = desc5.c_str();
4835
4836 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4837 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004838 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004839
4840 PoolTestResult result{};
4841 g_MemoryAliasingWarningEnabled = false;
4842 TestPool_Benchmark(result, config);
4843 g_MemoryAliasingWarningEnabled = true;
4844 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4845 }
4846 }
4847 }
4848 }
4849 }
4850 }
4851}
4852
Adam Sawickia83793a2018-09-03 13:40:42 +02004853static void BasicTestBuddyAllocator()
4854{
4855 wprintf(L"Basic test buddy allocator\n");
4856
4857 RandomNumberGenerator rand{76543};
4858
4859 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4860 sampleBufCreateInfo.size = 1024; // Whatever.
4861 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4862
4863 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4864 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4865
4866 VmaPoolCreateInfo poolCreateInfo = {};
4867 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004868 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004869
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004870 // Deliberately adding 1023 to test usable size smaller than memory block size.
4871 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004872 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004873 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004874
4875 VmaPool pool = nullptr;
4876 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004877 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004878
4879 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4880
4881 VmaAllocationCreateInfo allocCreateInfo = {};
4882 allocCreateInfo.pool = pool;
4883
4884 std::vector<BufferInfo> bufInfo;
4885 BufferInfo newBufInfo;
4886 VmaAllocationInfo allocInfo;
4887
4888 bufCreateInfo.size = 1024 * 256;
4889 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4890 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004891 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004892 bufInfo.push_back(newBufInfo);
4893
4894 bufCreateInfo.size = 1024 * 512;
4895 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4896 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004897 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004898 bufInfo.push_back(newBufInfo);
4899
4900 bufCreateInfo.size = 1024 * 128;
4901 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4902 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004903 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004904 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004905
4906 // Test very small allocation, smaller than minimum node size.
4907 bufCreateInfo.size = 1;
4908 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4909 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004910 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004911 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004912
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004913 // Test some small allocation with alignment requirement.
4914 {
4915 VkMemoryRequirements memReq;
4916 memReq.alignment = 256;
4917 memReq.memoryTypeBits = UINT32_MAX;
4918 memReq.size = 32;
4919
4920 newBufInfo.Buffer = VK_NULL_HANDLE;
4921 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4922 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004923 TEST(res == VK_SUCCESS);
4924 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004925 bufInfo.push_back(newBufInfo);
4926 }
4927
4928 //SaveAllocatorStatsToFile(L"TEST.json");
4929
Adam Sawicki21017c62018-09-07 15:26:59 +02004930 VmaPoolStats stats = {};
4931 vmaGetPoolStats(g_hAllocator, pool, &stats);
4932 int DBG = 0; // Set breakpoint here to inspect `stats`.
4933
Adam Sawicki80927152018-09-07 17:27:23 +02004934 // Allocate enough new buffers to surely fall into second block.
4935 for(uint32_t i = 0; i < 32; ++i)
4936 {
4937 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4938 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4939 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004940 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004941 bufInfo.push_back(newBufInfo);
4942 }
4943
4944 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4945
Adam Sawickia83793a2018-09-03 13:40:42 +02004946 // Destroy the buffers in random order.
4947 while(!bufInfo.empty())
4948 {
4949 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4950 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4951 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4952 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4953 }
4954
4955 vmaDestroyPool(g_hAllocator, pool);
4956}
4957
// Exercises vmaAllocateMemoryPages / vmaFreeMemoryPages against a custom
// pool fixed at exactly one 1 MB block: verifies batch allocation success,
// batch failure (out of memory, then impossible alignment) with full
// rollback, and a batch of dedicated allocations. The call order matters -
// each step depends on the pool being empty again after the previous one.
static void BasicTestAllocatePages()
{
    wprintf(L"Basic test allocate pages\n");

    RandomNumberGenerator rand{765461};

    // Sample buffer used only to find a suitable memory type for the pool.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // 1 block of 1 MB.
    poolCreateInfo.blockSize = 1024 * 1024;
    // Pin the pool to exactly one block so the failure cases below are deterministic.
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    // Create pool.
    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Make 100 allocations of 4 KB - they should fit into the pool.
    VkMemoryRequirements memReq;
    memReq.memoryTypeBits = UINT32_MAX; // Any memory type; the pool fixes the actual one.
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    allocCreateInfo.pool = pool;

    constexpr uint32_t allocCount = 100;

    // NOTE(review): brace-init here is intended as the count constructor
    // (100 null handles) - assumes VmaAllocation is a pointer-like handle,
    // not an integer typedef; vector(allocCount) would state that explicitly.
    std::vector<VmaAllocation> alloc{allocCount};
    std::vector<VmaAllocationInfo> allocInfo{allocCount};
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    // All pages must be mapped and must share one VkDeviceMemory (same block).
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
            allocInfo[i].memoryType == allocInfo[0].memoryType);
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
    // Also test optional allocationInfo = null.
    memReq.size = 100 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
    TEST(res != VK_SUCCESS);
    // On failure the whole batch must be rolled back: every handle stays null.
    // (The lambda parameter deliberately shadows the `alloc` vector.)
    TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());

    // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
    memReq.size = 4 * 1024;
    memReq.alignment = 128 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res != VK_SUCCESS);

    // Make 100 dedicated allocations of 4 KB.
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    // Dedicated allocations bypass the pool entirely, so usage must be set here.
    VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
    dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    // Dedicated pages: each must have its own VkDeviceMemory at offset 0.
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].memoryType == allocInfo[0].memoryType &&
            allocInfo[i].offset == 0);
        if(i > 0)
        {
            TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
        }
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    vmaDestroyPool(g_hAllocator, pool);
}
5054
Adam Sawickif2975342018-10-16 13:49:02 +02005055// Test the testing environment.
5056static void TestGpuData()
5057{
5058 RandomNumberGenerator rand = { 53434 };
5059
5060 std::vector<AllocInfo> allocInfo;
5061
5062 for(size_t i = 0; i < 100; ++i)
5063 {
5064 AllocInfo info = {};
5065
5066 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5067 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5068 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5069 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5070 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5071
5072 VmaAllocationCreateInfo allocCreateInfo = {};
5073 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5074
5075 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5076 TEST(res == VK_SUCCESS);
5077
5078 info.m_StartValue = rand.Generate();
5079
5080 allocInfo.push_back(std::move(info));
5081 }
5082
5083 UploadGpuData(allocInfo.data(), allocInfo.size());
5084
5085 ValidateGpuData(allocInfo.data(), allocInfo.size());
5086
5087 DestroyAllAllocations(allocInfo);
5088}
5089
// Entry point of the test suite: runs the quick functional tests first,
// then the benchmark-style tests that write their results as CSV files
// into the current working directory.
void Test()
{
    wprintf(L"TESTING:\n");

    // Change to `true` to run only ad-hoc tests inserted below and skip the suite.
    if(false)
    {
        ////////////////////////////////////////////////////////////////////////////////
        // Temporarily insert custom tests here:
        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    // With debug margins enabled, only the margin test runs; the pool and
    // heap-limit tests below assume margin-free allocation sizes.
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMapping();
    TestDeviceLocalMapped();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();
    BasicTestAllocatePages();

    // Benchmark of the allocation algorithms; results go to Algorithms.csv.
    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationWholePool();
    TestDefragmentationGpu();

    // # Detailed tests
    // Main-test and pool-test results are both written into Results.csv.
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}
5154
Adam Sawickif1a793c2018-03-13 15:42:22 +01005155#endif // #ifdef _WIN32