Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
2// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
3//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawicki94ce3d72019-04-17 14:59:25 +020029#include <functional>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
31#ifdef _WIN32
32
Adam Sawicki33d2ce72018-08-27 13:59:13 +020033static const char* CODE_DESCRIPTION = "Foo";
34
Adam Sawickif2975342018-10-16 13:49:02 +020035extern VkCommandBuffer g_hTemporaryCommandBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +020036extern const VkAllocationCallbacks* g_Allocs;
Adam Sawickif2975342018-10-16 13:49:02 +020037void BeginSingleTimeCommands();
38void EndSingleTimeCommands();
39
Adam Sawickibdb89a92018-12-13 11:56:30 +010040#ifndef VMA_DEBUG_MARGIN
41 #define VMA_DEBUG_MARGIN 0
42#endif
43
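// Global preset that scales how heavy the tests are (e.g. how many allocation
// strategies GetAllocationStrategyCount() exercises).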
Adam Sawicki0a607132018-08-24 11:18:41 +020044enum CONFIG_TYPE {
45 CONFIG_TYPE_MINIMUM,
46 CONFIG_TYPE_SMALL,
47 CONFIG_TYPE_AVERAGE,
48 CONFIG_TYPE_LARGE,
49 CONFIG_TYPE_MAXIMUM,
50 CONFIG_TYPE_COUNT
51};
52
Adam Sawickif2975342018-10-16 13:49:02 +020053static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
54//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020055
Adam Sawickib8333fb2018-03-13 16:15:53 +010056enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
57
Adam Sawicki0667e332018-08-24 17:26:44 +020058static const char* FREE_ORDER_NAMES[] = {
59 "FORWARD",
60 "BACKWARD",
61 "RANDOM",
Adam Sawicki0a607132018-08-24 11:18:41 +020062};
63
Adam Sawicki80927152018-09-07 17:27:23 +020064// Copy of internal VmaAlgorithmToStr.
65static const char* AlgorithmToStr(uint32_t algorithm)
66{
67 switch(algorithm)
68 {
69 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
70 return "Linear";
71 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
72 return "Buddy";
73 case 0:
74 return "Default";
75 default:
76 assert(0);
77 return "";
78 }
79}
80
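// One entry of a weighted random size distribution: either a buffer size range or an
// image dimension range. Example (hypothetical values): { Probability = 4, BufferSizeMin = 64,
// BufferSizeMax = 1024 } is drawn four times as often as an entry with Probability = 1.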
Adam Sawickib8333fb2018-03-13 16:15:53 +010081struct AllocationSize
82{
83 uint32_t Probability;
84 VkDeviceSize BufferSizeMin, BufferSizeMax;
85 uint32_t ImageSizeMin, ImageSizeMax;
86};
87
88struct Config
89{
90 uint32_t RandSeed;
91 VkDeviceSize BeginBytesToAllocate;
92 uint32_t AdditionalOperationCount;
93 VkDeviceSize MaxBytesToAllocate;
94 uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
95 std::vector<AllocationSize> AllocationSizes;
96 uint32_t ThreadCount;
97 uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
98 FREE_ORDER FreeOrder;
Adam Sawicki0667e332018-08-24 17:26:44 +020099 VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
Adam Sawickib8333fb2018-03-13 16:15:53 +0100100};
101
102struct Result
103{
104 duration TotalTime;
105 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
106 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
107 VkDeviceSize TotalMemoryAllocated;
108 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
109};
110
111void TestDefragmentationSimple();
112void TestDefragmentationFull();
113
114struct PoolTestConfig
115{
116 uint32_t RandSeed;
117 uint32_t ThreadCount;
118 VkDeviceSize PoolSize;
119 uint32_t FrameCount;
120 uint32_t TotalItemCount;
121 // Range for number of items used in each frame.
122 uint32_t UsedItemCountMin, UsedItemCountMax;
123 // Percent of items to make unused, and possibly make some others used in each frame.
124 uint32_t ItemsToMakeUnusedPercent;
125 std::vector<AllocationSize> AllocationSizes;
126
127 VkDeviceSize CalcAvgResourceSize() const
128 {
129 uint32_t probabilitySum = 0;
130 VkDeviceSize sizeSum = 0;
131 for(size_t i = 0; i < AllocationSizes.size(); ++i)
132 {
133 const AllocationSize& allocSize = AllocationSizes[i];
134 if(allocSize.BufferSizeMax > 0)
135 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
136 else
137 {
138 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
139 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
140 }
141 probabilitySum += allocSize.Probability;
142 }
143 return sizeSum / probabilitySum;
144 }
145
146 bool UsesBuffers() const
147 {
148 for(size_t i = 0; i < AllocationSizes.size(); ++i)
149 if(AllocationSizes[i].BufferSizeMax > 0)
150 return true;
151 return false;
152 }
153
154 bool UsesImages() const
155 {
156 for(size_t i = 0; i < AllocationSizes.size(); ++i)
157 if(AllocationSizes[i].ImageSizeMax > 0)
158 return true;
159 return false;
160 }
161};
162
163struct PoolTestResult
164{
165 duration TotalTime;
166 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
167 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
168 size_t LostAllocationCount, LostAllocationTotalSize;
169 size_t FailedAllocationCount, FailedAllocationTotalSize;
170};
171
172static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
173
Adam Sawicki51fa9662018-10-03 13:44:29 +0200174uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200175
Adam Sawickib8333fb2018-03-13 16:15:53 +0100176struct BufferInfo
177{
178 VkBuffer Buffer = VK_NULL_HANDLE;
179 VmaAllocation Allocation = VK_NULL_HANDLE;
180};
181
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200182static uint32_t GetAllocationStrategyCount()
183{
184 uint32_t strategyCount = 0;
185 switch(ConfigType)
186 {
187 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
188 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
189 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
190 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
191 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
192 default: assert(0);
193 }
194 return strategyCount;
195}
196
197static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
198{
199 switch(allocStrategy)
200 {
201 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
202 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
203 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
204 case 0: return "Default"; break;
205 default: assert(0); return "";
206 }
207}
208
Adam Sawickib8333fb2018-03-13 16:15:53 +0100209static void InitResult(Result& outResult)
210{
211 outResult.TotalTime = duration::zero();
212 outResult.AllocationTimeMin = duration::max();
213 outResult.AllocationTimeAvg = duration::zero();
214 outResult.AllocationTimeMax = duration::min();
215 outResult.DeallocationTimeMin = duration::max();
216 outResult.DeallocationTimeAvg = duration::zero();
217 outResult.DeallocationTimeMax = duration::min();
218 outResult.TotalMemoryAllocated = 0;
219 outResult.FreeRangeSizeAvg = 0;
220 outResult.FreeRangeSizeMax = 0;
221}
222
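// RAII timer: measures the time from construction to destruction and accumulates it
// into the referenced min/sum/max counters, e.g.:
// { AllocationTimeRegisterObj timeRegisterObj{outResult}; vmaCreateBuffer(...); }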
223class TimeRegisterObj
224{
225public:
226 TimeRegisterObj(duration& min, duration& sum, duration& max) :
227 m_Min(min),
228 m_Sum(sum),
229 m_Max(max),
230 m_TimeBeg(std::chrono::high_resolution_clock::now())
231 {
232 }
233
234 ~TimeRegisterObj()
235 {
236 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
237 m_Sum += d;
238 if(d < m_Min) m_Min = d;
239 if(d > m_Max) m_Max = d;
240 }
241
242private:
243 duration& m_Min;
244 duration& m_Sum;
245 duration& m_Max;
246 time_point m_TimeBeg;
247};
248
249struct PoolTestThreadResult
250{
251 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
252 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
253 size_t AllocationCount, DeallocationCount;
254 size_t LostAllocationCount, LostAllocationTotalSize;
255 size_t FailedAllocationCount, FailedAllocationTotalSize;
256};
257
258class AllocationTimeRegisterObj : public TimeRegisterObj
259{
260public:
261 AllocationTimeRegisterObj(Result& result) :
262 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
263 {
264 }
265};
266
267class DeallocationTimeRegisterObj : public TimeRegisterObj
268{
269public:
270 DeallocationTimeRegisterObj(Result& result) :
271 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
272 {
273 }
274};
275
276class PoolAllocationTimeRegisterObj : public TimeRegisterObj
277{
278public:
279 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
280 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
281 {
282 }
283};
284
285class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
286{
287public:
288 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
289 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
290 {
291 }
292};
293
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200294static void CurrentTimeToStr(std::string& out)
295{
296 time_t rawTime; time(&rawTime);
297 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
298 char timeStr[128];
299 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
300 out = timeStr;
301}
302
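// Multi-threaded stress test: each thread creates random buffers and images up to the
// configured byte limits, optionally sharing them with other threads, then frees
// everything in the configured FreeOrder. Timings and memory statistics go to outResult.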
Adam Sawickib8333fb2018-03-13 16:15:53 +0100303VkResult MainTest(Result& outResult, const Config& config)
304{
305 assert(config.ThreadCount > 0);
306
307 InitResult(outResult);
308
309 RandomNumberGenerator mainRand{config.RandSeed};
310
311 time_point timeBeg = std::chrono::high_resolution_clock::now();
312
313 std::atomic<size_t> allocationCount = 0;
314 VkResult res = VK_SUCCESS;
315
316 uint32_t memUsageProbabilitySum =
317 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
318 config.MemUsageProbability[2] + config.MemUsageProbability[3];
319 assert(memUsageProbabilitySum > 0);
320
321 uint32_t allocationSizeProbabilitySum = std::accumulate(
322 config.AllocationSizes.begin(),
323 config.AllocationSizes.end(),
324 0u,
325 [](uint32_t sum, const AllocationSize& allocSize) {
326 return sum + allocSize.Probability;
327 });
328
329 struct Allocation
330 {
331 VkBuffer Buffer;
332 VkImage Image;
333 VmaAllocation Alloc;
334 };
335
336 std::vector<Allocation> commonAllocations;
337 std::mutex commonAllocationsMutex;
338
339 auto Allocate = [&](
340 VkDeviceSize bufferSize,
341 const VkExtent2D imageExtent,
342 RandomNumberGenerator& localRand,
343 VkDeviceSize& totalAllocatedBytes,
344 std::vector<Allocation>& allocations) -> VkResult
345 {
346 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
347
348 uint32_t memUsageIndex = 0;
349 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
350 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
351 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
352
353 VmaAllocationCreateInfo memReq = {};
354 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200355 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100356
357 Allocation allocation = {};
358 VmaAllocationInfo allocationInfo;
359
360 // Buffer
361 if(bufferSize > 0)
362 {
363 assert(imageExtent.width == 0);
364 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
365 bufferInfo.size = bufferSize;
366 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
367
368 {
369 AllocationTimeRegisterObj timeRegisterObj{outResult};
370 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
371 }
372 }
373 // Image
374 else
375 {
376 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
377 imageInfo.imageType = VK_IMAGE_TYPE_2D;
378 imageInfo.extent.width = imageExtent.width;
379 imageInfo.extent.height = imageExtent.height;
380 imageInfo.extent.depth = 1;
381 imageInfo.mipLevels = 1;
382 imageInfo.arrayLayers = 1;
383 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
384 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
385 VK_IMAGE_TILING_OPTIMAL :
386 VK_IMAGE_TILING_LINEAR;
387 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
388 switch(memReq.usage)
389 {
390 case VMA_MEMORY_USAGE_GPU_ONLY:
391 switch(localRand.Generate() % 3)
392 {
393 case 0:
394 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
395 break;
396 case 1:
397 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
398 break;
399 case 2:
400 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
401 break;
402 }
403 break;
404 case VMA_MEMORY_USAGE_CPU_ONLY:
405 case VMA_MEMORY_USAGE_CPU_TO_GPU:
406 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
407 break;
408 case VMA_MEMORY_USAGE_GPU_TO_CPU:
409 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
410 break;
411 }
412 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
413 imageInfo.flags = 0;
414
415 {
416 AllocationTimeRegisterObj timeRegisterObj{outResult};
417 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
418 }
419 }
420
421 if(res == VK_SUCCESS)
422 {
423 ++allocationCount;
424 totalAllocatedBytes += allocationInfo.size;
425 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
426 if(useCommonAllocations)
427 {
428 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
429 commonAllocations.push_back(allocation);
430 }
431 else
432 allocations.push_back(allocation);
433 }
434 else
435 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200436 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100437 }
438 return res;
439 };
440
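    // Draws a random buffer size or image extent from the weighted AllocationSizes distribution.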
441 auto GetNextAllocationSize = [&](
442 VkDeviceSize& outBufSize,
443 VkExtent2D& outImageSize,
444 RandomNumberGenerator& localRand)
445 {
446 outBufSize = 0;
447 outImageSize = {0, 0};
448
449 uint32_t allocSizeIndex = 0;
450 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
451 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
452 r -= config.AllocationSizes[allocSizeIndex++].Probability;
453
454 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
455 if(allocSize.BufferSizeMax > 0)
456 {
457 assert(allocSize.ImageSizeMax == 0);
458 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
459 outBufSize = allocSize.BufferSizeMin;
460 else
461 {
462 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
463 outBufSize = outBufSize / 16 * 16;
464 }
465 }
466 else
467 {
468 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
469 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
470 else
471 {
472 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
473 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
474 }
475 }
476 };
477
478 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
479 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
480
481 auto ThreadProc = [&](uint32_t randSeed) -> void
482 {
483 RandomNumberGenerator threadRand(randSeed);
484 VkDeviceSize threadTotalAllocatedBytes = 0;
485 std::vector<Allocation> threadAllocations;
486 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
487 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
488 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
489
490 // BEGIN ALLOCATIONS
491 for(;;)
492 {
493 VkDeviceSize bufferSize = 0;
494 VkExtent2D imageExtent = {};
495 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
496 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
497 threadBeginBytesToAllocate)
498 {
499 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
500 break;
501 }
502 else
503 break;
504 }
505
506 // ADDITIONAL ALLOCATIONS AND FREES
507 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
508 {
509 VkDeviceSize bufferSize = 0;
510 VkExtent2D imageExtent = {};
511 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
512
513 // true = allocate, false = free
514 bool allocate = threadRand.Generate() % 2 != 0;
515
516 if(allocate)
517 {
518 if(threadTotalAllocatedBytes +
519 bufferSize +
520 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
521 threadMaxBytesToAllocate)
522 {
523 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
524 break;
525 }
526 }
527 else
528 {
529 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
530 if(useCommonAllocations)
531 {
532 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
533 if(!commonAllocations.empty())
534 {
535 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
536 VmaAllocationInfo allocationInfo;
537 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
538 if(threadTotalAllocatedBytes >= allocationInfo.size)
539 {
540 DeallocationTimeRegisterObj timeRegisterObj{outResult};
541 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
542 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
543 else
544 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
545 threadTotalAllocatedBytes -= allocationInfo.size;
546 commonAllocations.erase(commonAllocations.begin() + indexToFree);
547 }
548 }
549 }
550 else
551 {
552 if(!threadAllocations.empty())
553 {
554 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
555 VmaAllocationInfo allocationInfo;
556 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
557 if(threadTotalAllocatedBytes >= allocationInfo.size)
558 {
559 DeallocationTimeRegisterObj timeRegisterObj{outResult};
560 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
561 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
562 else
563 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
564 threadTotalAllocatedBytes -= allocationInfo.size;
565 threadAllocations.erase(threadAllocations.begin() + indexToFree);
566 }
567 }
568 }
569 }
570 }
571
572 ++numThreadsReachedMaxAllocations;
573
574 WaitForSingleObject(threadsFinishEvent, INFINITE);
575
576 // DEALLOCATION
577 while(!threadAllocations.empty())
578 {
579 size_t indexToFree = 0;
580 switch(config.FreeOrder)
581 {
582 case FREE_ORDER::FORWARD:
583 indexToFree = 0;
584 break;
585 case FREE_ORDER::BACKWARD:
586 indexToFree = threadAllocations.size() - 1;
587 break;
588 case FREE_ORDER::RANDOM:
589 indexToFree = mainRand.Generate() % threadAllocations.size();
590 break;
591 }
592
593 {
594 DeallocationTimeRegisterObj timeRegisterObj{outResult};
595 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
596 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
597 else
598 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
599 }
600 threadAllocations.erase(threadAllocations.begin() + indexToFree);
601 }
602 };
603
604 uint32_t threadRandSeed = mainRand.Generate();
605 std::vector<std::thread> bkgThreads;
606 for(size_t i = 0; i < config.ThreadCount; ++i)
607 {
608 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
609 }
610
611 // Wait until all threads have reached their max allocation count
612 while(numThreadsReachedMaxAllocations < config.ThreadCount)
613 Sleep(0);
614
615 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
616 VmaStats vmaStats = {};
617 vmaCalculateStats(g_hAllocator, &vmaStats);
618 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
619 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
620 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
621
622 // Signal threads to deallocate
623 SetEvent(threadsFinishEvent);
624
625 // Wait for threads to finish
626 for(size_t i = 0; i < bkgThreads.size(); ++i)
627 bkgThreads[i].join();
628 bkgThreads.clear();
629
630 CloseHandle(threadsFinishEvent);
631
632 // Deallocate remaining common resources
633 while(!commonAllocations.empty())
634 {
635 size_t indexToFree = 0;
636 switch(config.FreeOrder)
637 {
638 case FREE_ORDER::FORWARD:
639 indexToFree = 0;
640 break;
641 case FREE_ORDER::BACKWARD:
642 indexToFree = commonAllocations.size() - 1;
643 break;
644 case FREE_ORDER::RANDOM:
645 indexToFree = mainRand.Generate() % commonAllocations.size();
646 break;
647 }
648
649 {
650 DeallocationTimeRegisterObj timeRegisterObj{outResult};
651 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
652 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
653 else
654 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
655 }
656 commonAllocations.erase(commonAllocations.begin() + indexToFree);
657 }
658
659 if(allocationCount)
660 {
661 outResult.AllocationTimeAvg /= allocationCount;
662 outResult.DeallocationTimeAvg /= allocationCount;
663 }
664
665 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
666
667 return res;
668}
669
Adam Sawicki51fa9662018-10-03 13:44:29 +0200670void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100671{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100672 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100673 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200674 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100675 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200676 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100677}
678
679struct AllocInfo
680{
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200681 VmaAllocation m_Allocation = VK_NULL_HANDLE;
682 VkBuffer m_Buffer = VK_NULL_HANDLE;
683 VkImage m_Image = VK_NULL_HANDLE;
684 uint32_t m_StartValue = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100685 union
686 {
687 VkBufferCreateInfo m_BufferInfo;
688 VkImageCreateInfo m_ImageInfo;
689 };
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200690
691 void CreateBuffer(
692 const VkBufferCreateInfo& bufCreateInfo,
693 const VmaAllocationCreateInfo& allocCreateInfo);
694 void Destroy();
Adam Sawickib8333fb2018-03-13 16:15:53 +0100695};
696
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200697void AllocInfo::CreateBuffer(
698 const VkBufferCreateInfo& bufCreateInfo,
699 const VmaAllocationCreateInfo& allocCreateInfo)
700{
701 m_BufferInfo = bufCreateInfo;
702 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
703 TEST(res == VK_SUCCESS);
704}
705
706void AllocInfo::Destroy()
707{
708 if(m_Image)
709 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200710 vkDestroyImage(g_hDevice, m_Image, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200711 }
712 if(m_Buffer)
713 {
Adam Sawicki1f84f622019-07-02 13:40:01 +0200714 vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200715 }
716 if(m_Allocation)
717 {
718 vmaFreeMemory(g_hAllocator, m_Allocation);
719 }
720}
721
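// Cache of persistently mapped, host-visible staging buffers that are reused between
// transfers, with their total size capped at MAX_TOTAL_SIZE.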
Adam Sawickif2975342018-10-16 13:49:02 +0200722class StagingBufferCollection
723{
724public:
725 StagingBufferCollection() { }
726 ~StagingBufferCollection();
727 // Returns false if maximum total size of buffers would be exceeded.
728 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
729 void ReleaseAllBuffers();
730
731private:
732 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
733 struct BufInfo
734 {
735 VmaAllocation Allocation = VK_NULL_HANDLE;
736 VkBuffer Buffer = VK_NULL_HANDLE;
737 VkDeviceSize Size = VK_WHOLE_SIZE;
738 void* MappedPtr = nullptr;
739 bool Used = false;
740 };
741 std::vector<BufInfo> m_Bufs;
742 // Including both used and unused.
743 VkDeviceSize m_TotalSize = 0;
744};
745
746StagingBufferCollection::~StagingBufferCollection()
747{
748 for(size_t i = m_Bufs.size(); i--; )
749 {
750 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
751 }
752}
753
754bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
755{
756 assert(size <= MAX_TOTAL_SIZE);
757
758 // Try to find an existing unused buffer with the smallest size that still fits.
759 size_t bestIndex = SIZE_MAX;
760 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
761 {
762 BufInfo& currBufInfo = m_Bufs[i];
763 if(!currBufInfo.Used && currBufInfo.Size >= size &&
764 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
765 {
766 bestIndex = i;
767 }
768 }
769
770 if(bestIndex != SIZE_MAX)
771 {
772 m_Bufs[bestIndex].Used = true;
773 outBuffer = m_Bufs[bestIndex].Buffer;
774 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
775 return true;
776 }
777
778 // Allocate new buffer with requested size.
779 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
780 {
781 BufInfo bufInfo;
782 bufInfo.Size = size;
783 bufInfo.Used = true;
784
785 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
786 bufCreateInfo.size = size;
787 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
788
789 VmaAllocationCreateInfo allocCreateInfo = {};
790 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
791 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
792
793 VmaAllocationInfo allocInfo;
794 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
795 bufInfo.MappedPtr = allocInfo.pMappedData;
796 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
797
798 outBuffer = bufInfo.Buffer;
799 outMappedPtr = bufInfo.MappedPtr;
800
801 m_Bufs.push_back(std::move(bufInfo));
802
803 m_TotalSize += size;
804
805 return true;
806 }
807
808 // The request did not fit. If there are unused (but too small) buffers, free them and try again.
809 bool hasUnused = false;
810 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
811 {
812 if(!m_Bufs[i].Used)
813 {
814 hasUnused = true;
815 break;
816 }
817 }
818 if(hasUnused)
819 {
820 for(size_t i = m_Bufs.size(); i--; )
821 {
822 if(!m_Bufs[i].Used)
823 {
824 m_TotalSize -= m_Bufs[i].Size;
825 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
826 m_Bufs.erase(m_Bufs.begin() + i);
827 }
828 }
829
830 return AcquireBuffer(size, outBuffer, outMappedPtr);
831 }
832
833 return false;
834}
835
836void StagingBufferCollection::ReleaseAllBuffers()
837{
838 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
839 {
840 m_Bufs[i].Used = false;
841 }
842}
843
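// Fills each destination buffer with a sequential pattern starting at m_StartValue,
// copying through staging buffers and batching the copies into single-time command buffers.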
844static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
845{
846 StagingBufferCollection stagingBufs;
847
848 bool cmdBufferStarted = false;
849 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
850 {
851 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
852 if(currAllocInfo.m_Buffer)
853 {
854 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
855
856 VkBuffer stagingBuf = VK_NULL_HANDLE;
857 void* stagingBufMappedPtr = nullptr;
858 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
859 {
860 TEST(cmdBufferStarted);
861 EndSingleTimeCommands();
862 stagingBufs.ReleaseAllBuffers();
863 cmdBufferStarted = false;
864
865 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
866 TEST(ok);
867 }
868
869 // Fill staging buffer.
870 {
871 assert(size % sizeof(uint32_t) == 0);
872 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
873 uint32_t val = currAllocInfo.m_StartValue;
874 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
875 {
876 *stagingValPtr = val;
877 ++stagingValPtr;
878 ++val;
879 }
880 }
881
882 // Issue copy command from staging buffer to destination buffer.
883 if(!cmdBufferStarted)
884 {
885 cmdBufferStarted = true;
886 BeginSingleTimeCommands();
887 }
888
889 VkBufferCopy copy = {};
890 copy.srcOffset = 0;
891 copy.dstOffset = 0;
892 copy.size = size;
893 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
894 }
895 else
896 {
897 TEST(0 && "Images not currently supported.");
898 }
899 }
900
901 if(cmdBufferStarted)
902 {
903 EndSingleTimeCommands();
904 stagingBufs.ReleaseAllBuffers();
905 }
906}
907
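// Reads every buffer back through staging memory and checks that it still contains the
// sequential pattern written by UploadGpuData().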
908static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
909{
910 StagingBufferCollection stagingBufs;
911
912 bool cmdBufferStarted = false;
913 size_t validateAllocIndexOffset = 0;
914 std::vector<void*> validateStagingBuffers;
915 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
916 {
917 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
918 if(currAllocInfo.m_Buffer)
919 {
920 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
921
922 VkBuffer stagingBuf = VK_NULL_HANDLE;
923 void* stagingBufMappedPtr = nullptr;
924 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
925 {
926 TEST(cmdBufferStarted);
927 EndSingleTimeCommands();
928 cmdBufferStarted = false;
929
930 for(size_t validateIndex = 0;
931 validateIndex < validateStagingBuffers.size();
932 ++validateIndex)
933 {
934 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
935 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
936 TEST(validateSize % sizeof(uint32_t) == 0);
937 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
938 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
939 bool valid = true;
940 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
941 {
942 if(*stagingValPtr != val)
943 {
944 valid = false;
945 break;
946 }
947 ++stagingValPtr;
948 ++val;
949 }
950 TEST(valid);
951 }
952
953 stagingBufs.ReleaseAllBuffers();
954
955 validateAllocIndexOffset = allocInfoIndex;
956 validateStagingBuffers.clear();
957
958 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
959 TEST(ok);
960 }
961
962 // Issue copy command from the destination buffer back to the staging buffer.
963 if(!cmdBufferStarted)
964 {
965 cmdBufferStarted = true;
966 BeginSingleTimeCommands();
967 }
968
969 VkBufferCopy copy = {};
970 copy.srcOffset = 0;
971 copy.dstOffset = 0;
972 copy.size = size;
973 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
974
975 // Save the mapped pointer for later validation.
976 validateStagingBuffers.push_back(stagingBufMappedPtr);
977 }
978 else
979 {
980 TEST(0 && "Images not currently supported.");
981 }
982 }
983
984 if(cmdBufferStarted)
985 {
986 EndSingleTimeCommands();
987
988 for(size_t validateIndex = 0;
989 validateIndex < validateStagingBuffers.size();
990 ++validateIndex)
991 {
992 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
993 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
994 TEST(validateSize % sizeof(uint32_t) == 0);
995 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
996 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
997 bool valid = true;
998 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
999 {
1000 if(*stagingValPtr != val)
1001 {
1002 valid = false;
1003 break;
1004 }
1005 ++stagingValPtr;
1006 ++val;
1007 }
1008 TEST(valid);
1009 }
1010
1011 stagingBufs.ReleaseAllBuffers();
1012 }
1013}
1014
Adam Sawickib8333fb2018-03-13 16:15:53 +01001015static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1016{
1017 outMemReq = {};
1018 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1019 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1020}
1021
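// Creates a buffer in the given pool (optionally persistently mapped) and fills it with
// a sequential pattern starting at a random m_StartValue for later validation.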
1022static void CreateBuffer(
1023 VmaPool pool,
1024 const VkBufferCreateInfo& bufCreateInfo,
1025 bool persistentlyMapped,
1026 AllocInfo& outAllocInfo)
1027{
1028 outAllocInfo = {};
1029 outAllocInfo.m_BufferInfo = bufCreateInfo;
1030
1031 VmaAllocationCreateInfo allocCreateInfo = {};
1032 allocCreateInfo.pool = pool;
1033 if(persistentlyMapped)
1034 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1035
1036 VmaAllocationInfo vmaAllocInfo = {};
1037 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1038
1039 // Setup StartValue and fill.
1040 {
1041 outAllocInfo.m_StartValue = (uint32_t)rand();
1042 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001043 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001044 if(!persistentlyMapped)
1045 {
1046 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1047 }
1048
1049 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001050 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001051 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1052 data[i] = value++;
1053
1054 if(!persistentlyMapped)
1055 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1056 }
1057}
1058
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001059static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001060{
1061 outAllocation.m_Allocation = nullptr;
1062 outAllocation.m_Buffer = nullptr;
1063 outAllocation.m_Image = nullptr;
1064 outAllocation.m_StartValue = (uint32_t)rand();
1065
1066 VmaAllocationCreateInfo vmaMemReq;
1067 GetMemReq(vmaMemReq);
1068
1069 VmaAllocationInfo allocInfo;
1070
1071 const bool isBuffer = true;//(rand() & 0x1) != 0;
1072 const bool isLarge = (rand() % 16) == 0;
1073 if(isBuffer)
1074 {
1075 const uint32_t bufferSize = isLarge ?
1076 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1077 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1078
1079 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1080 bufferInfo.size = bufferSize;
1081 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1082
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001083 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001084 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001085 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001086 }
1087 else
1088 {
1089 const uint32_t imageSizeX = isLarge ?
1090 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1091 rand() % 1024 + 1; // 1 ... 1024
1092 const uint32_t imageSizeY = isLarge ?
1093 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1094 rand() % 1024 + 1; // 1 ... 1024
1095
1096 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1097 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1098 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1099 imageInfo.extent.width = imageSizeX;
1100 imageInfo.extent.height = imageSizeY;
1101 imageInfo.extent.depth = 1;
1102 imageInfo.mipLevels = 1;
1103 imageInfo.arrayLayers = 1;
1104 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1105 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1106 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1107 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1108
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001109 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001110 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001111 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001112 }
1113
1114 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1115 if(allocInfo.pMappedData == nullptr)
1116 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001117 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001118 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001119 }
1120
1121 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001122 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001123 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1124 data[i] = value++;
1125
1126 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001127 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001128}
1129
1130static void DestroyAllocation(const AllocInfo& allocation)
1131{
1132 if(allocation.m_Buffer)
1133 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1134 else
1135 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1136}
1137
1138static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1139{
1140 for(size_t i = allocations.size(); i--; )
1141 DestroyAllocation(allocations[i]);
1142 allocations.clear();
1143}
1144
1145static void ValidateAllocationData(const AllocInfo& allocation)
1146{
1147 VmaAllocationInfo allocInfo;
1148 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1149
1150 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1151 if(allocInfo.pMappedData == nullptr)
1152 {
1153 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001154 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001155 }
1156
1157 uint32_t value = allocation.m_StartValue;
1158 bool ok = true;
1159 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001160 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001161 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1162 {
1163 if(data[i] != value++)
1164 {
1165 ok = false;
1166 break;
1167 }
1168 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001169 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001170
1171 if(allocInfo.pMappedData == nullptr)
1172 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1173}
1174
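// After defragmentation moves an allocation, its old buffer/image is stale: destroy it,
// create a new one from the stored create info, and bind it to the allocation's new memory.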
1175static void RecreateAllocationResource(AllocInfo& allocation)
1176{
1177 VmaAllocationInfo allocInfo;
1178 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1179
1180 if(allocation.m_Buffer)
1181 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001182 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001183
Adam Sawicki1f84f622019-07-02 13:40:01 +02001184 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001185 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001186
1187 // Just to silence validation layer warnings.
1188 VkMemoryRequirements vkMemReq;
1189 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001190 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001191
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001192 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001193 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001194 }
1195 else
1196 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001197 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001198
Adam Sawicki1f84f622019-07-02 13:40:01 +02001199 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001200 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001201
1202 // Just to silence validation layer warnings.
1203 VkMemoryRequirements vkMemReq;
1204 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1205
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001206 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001207 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001208 }
1209}
1210
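// Thin wrapper over vmaDefragment() that recreates and rebinds the resource of every
// allocation reported as changed.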
1211static void Defragment(AllocInfo* allocs, size_t allocCount,
1212 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1213 VmaDefragmentationStats* defragmentationStats = nullptr)
1214{
1215 std::vector<VmaAllocation> vmaAllocs(allocCount);
1216 for(size_t i = 0; i < allocCount; ++i)
1217 vmaAllocs[i] = allocs[i].m_Allocation;
1218
1219 std::vector<VkBool32> allocChanged(allocCount);
1220
1221 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1222 defragmentationInfo, defragmentationStats) );
1223
1224 for(size_t i = 0; i < allocCount; ++i)
1225 {
1226 if(allocChanged[i])
1227 {
1228 RecreateAllocationResource(allocs[i]);
1229 }
1230 }
1231}
1232
1233static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1234{
1235 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1236 ValidateAllocationData(allocInfo);
1237 });
1238}
1239
1240void TestDefragmentationSimple()
1241{
1242 wprintf(L"Test defragmentation simple\n");
1243
1244 RandomNumberGenerator rand(667);
1245
1246 const VkDeviceSize BUF_SIZE = 0x10000;
1247 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1248
1249 const VkDeviceSize MIN_BUF_SIZE = 32;
1250 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1251 auto RandomBufSize = [&]() -> VkDeviceSize {
1252 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1253 };
1254
1255 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1256 bufCreateInfo.size = BUF_SIZE;
1257 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1258
1259 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1260 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1261
1262 uint32_t memTypeIndex = UINT32_MAX;
1263 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1264
1265 VmaPoolCreateInfo poolCreateInfo = {};
1266 poolCreateInfo.blockSize = BLOCK_SIZE;
1267 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1268
1269 VmaPool pool;
1270 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1271
Adam Sawickie1681912018-11-23 17:50:12 +01001272 // Defragmentation of empty pool.
1273 {
1274 VmaDefragmentationInfo2 defragInfo = {};
1275 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1276 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1277 defragInfo.poolCount = 1;
1278 defragInfo.pPools = &pool;
1279
1280 VmaDefragmentationStats defragStats = {};
1281 VmaDefragmentationContext defragCtx = nullptr;
1282 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1283 TEST(res >= VK_SUCCESS);
1284 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1285 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1286 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1287 }
1288
Adam Sawickib8333fb2018-03-13 16:15:53 +01001289 std::vector<AllocInfo> allocations;
1290
1291 // persistentlyMappedOption = 0 - not persistently mapped.
1292 // persistentlyMappedOption = 1 - persistently mapped.
1293 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1294 {
1295 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1296 const bool persistentlyMapped = persistentlyMappedOption != 0;
1297
1298 // # Test 1
1299 // Buffers of fixed size.
1300 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1301 // Expected result: at least 1 block freed.
1302 {
1303 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1304 {
1305 AllocInfo allocInfo;
1306 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1307 allocations.push_back(allocInfo);
1308 }
1309
1310 for(size_t i = 1; i < allocations.size(); ++i)
1311 {
1312 DestroyAllocation(allocations[i]);
1313 allocations.erase(allocations.begin() + i);
1314 }
1315
1316 VmaDefragmentationStats defragStats;
1317 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001318 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1319 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001320
1321 ValidateAllocationsData(allocations.data(), allocations.size());
1322
1323 DestroyAllAllocations(allocations);
1324 }
1325
1326 // # Test 2
1327 // Buffers of fixed size.
1328 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
1329 // Expected result: Each of 4 iterations makes some progress.
1330 {
1331 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1332 {
1333 AllocInfo allocInfo;
1334 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1335 allocations.push_back(allocInfo);
1336 }
1337
1338 for(size_t i = 1; i < allocations.size(); ++i)
1339 {
1340 DestroyAllocation(allocations[i]);
1341 allocations.erase(allocations.begin() + i);
1342 }
1343
1344 VmaDefragmentationInfo defragInfo = {};
1345 defragInfo.maxAllocationsToMove = 1;
1346 defragInfo.maxBytesToMove = BUF_SIZE;
1347
1348 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1349 {
1350 VmaDefragmentationStats defragStats;
1351 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001352 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001353 }
1354
1355 ValidateAllocationsData(allocations.data(), allocations.size());
1356
1357 DestroyAllAllocations(allocations);
1358 }
1359
1360 // # Test 3
1361 // Buffers of variable size.
1362 // Create a number of buffers. Remove some percent of them.
1363 // Defragment while having some percent of them unmovable.
1364 // Expected result: Just simple validation.
1365 {
1366 for(size_t i = 0; i < 100; ++i)
1367 {
1368 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1369 localBufCreateInfo.size = RandomBufSize();
1370
1371 AllocInfo allocInfo;
1372 CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
1373 allocations.push_back(allocInfo);
1374 }
1375
1376 const uint32_t percentToDelete = 60;
1377 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1378 for(size_t i = 0; i < numberToDelete; ++i)
1379 {
1380 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1381 DestroyAllocation(allocations[indexToDelete]);
1382 allocations.erase(allocations.begin() + indexToDelete);
1383 }
1384
1385 // Non-movable allocations will be at the beginning of allocations array.
1386 const uint32_t percentNonMovable = 20;
1387 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1388 for(size_t i = 0; i < numberNonMovable; ++i)
1389 {
1390 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1391 if(indexNonMovable != i)
1392 std::swap(allocations[i], allocations[indexNonMovable]);
1393 }
1394
1395 VmaDefragmentationStats defragStats;
1396 Defragment(
1397 allocations.data() + numberNonMovable,
1398 allocations.size() - numberNonMovable,
1399 nullptr, &defragStats);
1400
1401 ValidateAllocationsData(allocations.data(), allocations.size());
1402
1403 DestroyAllAllocations(allocations);
1404 }
1405 }
1406
Adam Sawicki647cf242018-11-23 17:58:00 +01001407 /*
1408 Allocation that must be moved to an overlapping place using memmove().
1409 Create 2 buffers, the second slightly bigger than the first. Delete the first. Then defragment.
1410 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001411 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001412 {
1413 AllocInfo allocInfo[2];
1414
1415 bufCreateInfo.size = BUF_SIZE;
1416 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1417 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1418 bufCreateInfo.size = biggerBufSize;
1419 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1420
1421 DestroyAllocation(allocInfo[0]);
1422
1423 VmaDefragmentationStats defragStats;
1424 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1425 // If this fails, it means we couldn't do memmove with overlapping regions.
1426 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1427
1428 ValidateAllocationsData(&allocInfo[1], 1);
1429 DestroyAllocation(allocInfo[1]);
1430 }
1431
Adam Sawickib8333fb2018-03-13 16:15:53 +01001432 vmaDestroyPool(g_hAllocator, pool);
1433}
1434
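// Runs the same defragmentation twice - case 0 addresses the whole pool via pPools,
// case 1 passes an explicit allocation list - and expects identical statistics from both.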
Adam Sawicki52076eb2018-11-22 16:14:50 +01001435void TestDefragmentationWholePool()
1436{
1437 wprintf(L"Test defragmentation whole pool\n");
1438
1439 RandomNumberGenerator rand(668);
1440
1441 const VkDeviceSize BUF_SIZE = 0x10000;
1442 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1443
1444 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1445 bufCreateInfo.size = BUF_SIZE;
1446 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1447
1448 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1449 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1450
1451 uint32_t memTypeIndex = UINT32_MAX;
1452 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1453
1454 VmaPoolCreateInfo poolCreateInfo = {};
1455 poolCreateInfo.blockSize = BLOCK_SIZE;
1456 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1457
1458 VmaDefragmentationStats defragStats[2];
1459 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1460 {
1461 VmaPool pool;
1462 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1463
1464 std::vector<AllocInfo> allocations;
1465
1466 // Buffers of fixed size.
1467 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1468 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1469 {
1470 AllocInfo allocInfo;
1471 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1472 allocations.push_back(allocInfo);
1473 }
1474
1475 for(size_t i = 1; i < allocations.size(); ++i)
1476 {
1477 DestroyAllocation(allocations[i]);
1478 allocations.erase(allocations.begin() + i);
1479 }
1480
1481 VmaDefragmentationInfo2 defragInfo = {};
1482 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1483 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1484 std::vector<VmaAllocation> allocationsToDefrag;
1485 if(caseIndex == 0)
1486 {
1487 defragInfo.poolCount = 1;
1488 defragInfo.pPools = &pool;
1489 }
1490 else
1491 {
1492 const size_t allocCount = allocations.size();
1493 allocationsToDefrag.resize(allocCount);
1494 std::transform(
1495 allocations.begin(), allocations.end(),
1496 allocationsToDefrag.begin(),
1497 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1498 defragInfo.allocationCount = (uint32_t)allocCount;
1499 defragInfo.pAllocations = allocationsToDefrag.data();
1500 }
1501
1502 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1503 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1504 TEST(res >= VK_SUCCESS);
1505 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1506
1507 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1508
1509 ValidateAllocationsData(allocations.data(), allocations.size());
1510
1511 DestroyAllAllocations(allocations);
1512
1513 vmaDestroyPool(g_hAllocator, pool);
1514 }
1515
1516 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1517 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1518 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1519 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1520}
1521
Adam Sawickib8333fb2018-03-13 16:15:53 +01001522void TestDefragmentationFull()
1523{
1524 std::vector<AllocInfo> allocations;
1525
1526 // Create initial allocations.
1527 for(size_t i = 0; i < 400; ++i)
1528 {
1529 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001530 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001531 allocations.push_back(allocation);
1532 }
1533
1534 // Delete random allocations
1535 const size_t allocationsToDeletePercent = 80;
1536 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1537 for(size_t i = 0; i < allocationsToDelete; ++i)
1538 {
1539 size_t index = (size_t)rand() % allocations.size();
1540 DestroyAllocation(allocations[index]);
1541 allocations.erase(allocations.begin() + index);
1542 }
1543
1544 for(size_t i = 0; i < allocations.size(); ++i)
1545 ValidateAllocationData(allocations[i]);
1546
Adam Sawicki0667e332018-08-24 17:26:44 +02001547 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001548
1549 {
1550 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1551 for(size_t i = 0; i < allocations.size(); ++i)
1552 vmaAllocations[i] = allocations[i].m_Allocation;
1553
1554 const size_t nonMovablePercent = 0;
1555 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1556 for(size_t i = 0; i < nonMovableCount; ++i)
1557 {
1558 size_t index = (size_t)rand() % vmaAllocations.size();
1559 vmaAllocations.erase(vmaAllocations.begin() + index);
1560 }
1561
1562 const uint32_t defragCount = 1;
1563 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1564 {
1565 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1566
1567 VmaDefragmentationInfo defragmentationInfo;
1568 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1569 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1570
1571 wprintf(L"Defragmentation #%u\n", defragIndex);
1572
1573 time_point begTime = std::chrono::high_resolution_clock::now();
1574
1575 VmaDefragmentationStats stats;
1576 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001577 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001578
1579 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1580
1581 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1582 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1583 wprintf(L"Time: %.2f s\n", defragmentDuration);
1584
1585 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1586 {
1587 if(allocationsChanged[i])
1588 {
1589 RecreateAllocationResource(allocations[i]);
1590 }
1591 }
1592
1593 for(size_t i = 0; i < allocations.size(); ++i)
1594 ValidateAllocationData(allocations[i]);
1595
Adam Sawicki0667e332018-08-24 17:26:44 +02001596 //wchar_t fileName[MAX_PATH];
1597 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1598 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001599 }
1600 }
1601
1602 // Destroy all remaining allocations.
1603 DestroyAllAllocations(allocations);
1604}
1605
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001606static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001607{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001608 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001609 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001610
1611 std::vector<AllocInfo> allocations;
1612
1613 // Create enough allocations to surely fill 3 new blocks of 256 MB each.
Adam Sawickic6ede152018-11-16 17:04:14 +01001614 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1615 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001616 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001617 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1618 const size_t percentToLeave = 30;
1619 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001620 RandomNumberGenerator rand = { 234522 };
1621
1622 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001623
1624 VmaAllocationCreateInfo allocCreateInfo = {};
1625 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001626 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001627
1628 // Create all intended buffers.
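    // pUserData is used as a marker: 2 = buffers the test treats as non-movable,
    // 1 = movable buffers that are later passed to the defragmentation call.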
1629 for(size_t i = 0; i < bufCount; ++i)
1630 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001631 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1632
1633 if(rand.Generate() % 100 < percentNonMovable)
1634 {
1635 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1636 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1637 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1638 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1639 }
1640 else
1641 {
1642 // Different usage just to see different color in output from VmaDumpVis.
1643 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1644 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1645 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1646 // And in JSON dump.
1647 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1648 }
1649
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001650 AllocInfo alloc;
1651 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1652 alloc.m_StartValue = rand.Generate();
1653 allocations.push_back(alloc);
1654 }
1655
1656 // Destroy some percentage of them.
1657 {
1658 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1659 for(size_t i = 0; i < buffersToDestroy; ++i)
1660 {
1661 const size_t index = rand.Generate() % allocations.size();
1662 allocations[index].Destroy();
1663 allocations.erase(allocations.begin() + index);
1664 }
1665 }
1666
1667 // Fill them with meaningful data.
1668 UploadGpuData(allocations.data(), allocations.size());
1669
Adam Sawickic6ede152018-11-16 17:04:14 +01001670 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001671 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001672 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001673
1674 // Defragment using GPU only.
1675 {
1676 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001677
Adam Sawickic6ede152018-11-16 17:04:14 +01001678 std::vector<VmaAllocation> allocationPtrs;
1679 std::vector<VkBool32> allocationChanged;
1680 std::vector<size_t> allocationOriginalIndex;
1681
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001682 for(size_t i = 0; i < allocCount; ++i)
1683 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001684 VmaAllocationInfo allocInfo = {};
1685 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1686 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1687 {
1688 allocationPtrs.push_back(allocations[i].m_Allocation);
1689 allocationChanged.push_back(VK_FALSE);
1690 allocationOriginalIndex.push_back(i);
1691 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001692 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001693
1694 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001695
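        // GPU defragmentation records copy commands into the provided command buffer between
        // vmaDefragmentationBegin and vmaDefragmentationEnd. The command buffer must be submitted
        // and finished (here via EndSingleTimeCommands) before vmaDefragmentationEnd is called.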
1696 BeginSingleTimeCommands();
1697
1698 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001699 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001700 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001701 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001702 defragInfo.pAllocationsChanged = allocationChanged.data();
1703 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001704 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1705 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1706
1707 VmaDefragmentationStats stats = {};
1708 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1709 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1710 TEST(res >= VK_SUCCESS);
1711
1712 EndSingleTimeCommands();
1713
1714 vmaDefragmentationEnd(g_hAllocator, ctx);
1715
Adam Sawickic6ede152018-11-16 17:04:14 +01001716 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001717 {
1718 if(allocationChanged[i])
1719 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001720 const size_t origAllocIndex = allocationOriginalIndex[i];
1721 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001722 }
1723 }
1724
Adam Sawicki4d844e22019-01-24 16:21:05 +01001725 // If corruption detection is enabled, GPU defragmentation may not work on
1726 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001727 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001728 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1729 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001730 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001731 }
1732
1733 ValidateGpuData(allocations.data(), allocations.size());
1734
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001735 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001736 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001737
1738 // Destroy all remaining buffers.
1739 for(size_t i = allocations.size(); i--; )
1740 {
1741 allocations[i].Destroy();
1742 }
Adam Sawicki05704002018-11-08 16:07:29 +01001743
1744 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001745}
1746
Adam Sawickib8333fb2018-03-13 16:15:53 +01001747static void TestUserData()
1748{
1749 VkResult res;
1750
1751 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1752 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1753 bufCreateInfo.size = 0x10000;
1754
1755 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1756 {
1757 // Opaque pointer
1758 {
1759
1760 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1761 void* pointerToSomething = &res;
1762
1763 VmaAllocationCreateInfo allocCreateInfo = {};
1764 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1765 allocCreateInfo.pUserData = numberAsPointer;
1766 if(testIndex == 1)
1767 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1768
1769 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1770 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001771 TEST(res == VK_SUCCESS);
1772 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001775 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001776
1777 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1778 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001779 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001780
1781 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1782 }
1783
1784 // String
1785 {
1786 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1787 const char* name2 = "2";
1788 const size_t name1Len = strlen(name1);
1789
1790 char* name1Buf = new char[name1Len + 1];
1791 strcpy_s(name1Buf, name1Len + 1, name1);
1792
1793 VmaAllocationCreateInfo allocCreateInfo = {};
1794 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1795 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1796 allocCreateInfo.pUserData = name1Buf;
1797 if(testIndex == 1)
1798 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1799
1800 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001802 TEST(res == VK_SUCCESS);
1803 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1804 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001805
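            // With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT the allocator stores its own
            // copy of the string, so the original buffer can be freed right away.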
1806 delete[] name1Buf;
1807
1808 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001809 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001810
1811 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1812 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001813 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001814
1815 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1816 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001817 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001818
1819 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1820 }
1821 }
1822}
1823
Adam Sawicki370ab182018-11-08 16:31:00 +01001824static void TestInvalidAllocations()
1825{
1826 VkResult res;
1827
1828 VmaAllocationCreateInfo allocCreateInfo = {};
1829 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1830
1831 // Try to allocate 0 bytes.
1832 {
1833 VkMemoryRequirements memReq = {};
1834 memReq.size = 0; // !!!
1835 memReq.alignment = 4;
1836 memReq.memoryTypeBits = UINT32_MAX;
1837 VmaAllocation alloc = VK_NULL_HANDLE;
1838 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1839 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1840 }
1841
1842 // Try to create buffer with size = 0.
1843 {
1844 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1845 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1846 bufCreateInfo.size = 0; // !!!
1847 VkBuffer buf = VK_NULL_HANDLE;
1848 VmaAllocation alloc = VK_NULL_HANDLE;
1849 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1850 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1851 }
1852
1853 // Try to create image with one dimension = 0.
1854 {
1855 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1856 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1857 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1858 imageCreateInfo.extent.width = 128;
1859 imageCreateInfo.extent.height = 0; // !!!
1860 imageCreateInfo.extent.depth = 1;
1861 imageCreateInfo.mipLevels = 1;
1862 imageCreateInfo.arrayLayers = 1;
1863 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1864 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1865 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1866 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1867 VkImage image = VK_NULL_HANDLE;
1868 VmaAllocation alloc = VK_NULL_HANDLE;
1869 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1870 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1871 }
1872}
1873
Adam Sawickib8333fb2018-03-13 16:15:53 +01001874static void TestMemoryRequirements()
1875{
1876 VkResult res;
1877 VkBuffer buf;
1878 VmaAllocation alloc;
1879 VmaAllocationInfo allocInfo;
1880
1881 const VkPhysicalDeviceMemoryProperties* memProps;
1882 vmaGetMemoryProperties(g_hAllocator, &memProps);
1883
1884 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1885 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1886 bufInfo.size = 128;
1887
1888 VmaAllocationCreateInfo allocCreateInfo = {};
1889
1890 // No requirements.
1891 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001892 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001893 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1894
1895 // Usage.
1896 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1897 allocCreateInfo.requiredFlags = 0;
1898 allocCreateInfo.preferredFlags = 0;
1899 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1900
1901 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001902 TEST(res == VK_SUCCESS);
1903 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001904 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1905
1906 // Required flags, preferred flags.
1907 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1908 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1909 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1910 allocCreateInfo.memoryTypeBits = 0;
1911
1912 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001913 TEST(res == VK_SUCCESS);
1914 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1915 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001916 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1917
1918 // memoryTypeBits.
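    // memoryTypeBits is a bitmask of acceptable memory types; 1u << memType restricts the
    // allocation to exactly the memory type returned by the previous call.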
1919 const uint32_t memType = allocInfo.memoryType;
1920 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1921 allocCreateInfo.requiredFlags = 0;
1922 allocCreateInfo.preferredFlags = 0;
1923 allocCreateInfo.memoryTypeBits = 1u << memType;
1924
1925 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001926 TEST(res == VK_SUCCESS);
1927 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001928 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1929
1930}
1931
1932static void TestBasics()
1933{
1934 VkResult res;
1935
1936 TestMemoryRequirements();
1937
1938 // Lost allocation
1939 {
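        // vmaCreateLostAllocation returns an allocation that is already lost: it has no memory
        // bound (deviceMemory == VK_NULL_HANDLE, size == 0) but must still be freed.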
1940 VmaAllocation alloc = VK_NULL_HANDLE;
1941 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001942 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001943
1944 VmaAllocationInfo allocInfo;
1945 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001946 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1947 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001948
1949 vmaFreeMemory(g_hAllocator, alloc);
1950 }
1951
1952 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1953 {
1954 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1955 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1956 bufCreateInfo.size = 128;
1957
1958 VmaAllocationCreateInfo allocCreateInfo = {};
1959 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1960 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1961
1962 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1963 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001964 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001965
1966 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1967
1968 // Same, but with DEDICATED_MEMORY.
1969 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1970
1971 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001972 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001973
1974 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1975 }
1976
1977 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001978
1979 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001980}
1981
1982void TestHeapSizeLimit()
1983{
Adam Sawickib3f51102019-11-18 13:05:56 +01001984 const VkDeviceSize HEAP_SIZE_LIMIT = 200ull * 1024 * 1024; // 200 MB
1985 const VkDeviceSize BLOCK_SIZE = 20ull * 1024 * 1024; // 20 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01001986
1987 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1988 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1989 {
1990 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1991 }
1992
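    // pHeapSizeLimit makes the allocator act as if every memory heap had only HEAP_SIZE_LIMIT
    // bytes, so allocations beyond that per heap should fail with VK_ERROR_OUT_OF_DEVICE_MEMORY.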
1993 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1994 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1995 allocatorCreateInfo.device = g_hDevice;
1996 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1997
1998 VmaAllocator hAllocator;
1999 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002000 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002001
2002 struct Item
2003 {
2004 VkBuffer hBuf;
2005 VmaAllocation hAlloc;
2006 };
2007 std::vector<Item> items;
2008
2009 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2010 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2011
2012 // 1. Allocate two buffers with dedicated (own) memory, each half the size of BLOCK_SIZE.
2013 VmaAllocationInfo ownAllocInfo;
2014 {
2015 VmaAllocationCreateInfo allocCreateInfo = {};
2016 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2017 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2018
2019 bufCreateInfo.size = BLOCK_SIZE / 2;
2020
2021 for(size_t i = 0; i < 2; ++i)
2022 {
2023 Item item;
2024 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002025 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002026 items.push_back(item);
2027 }
2028 }
2029
2030 // Create a pool to make sure further allocations come from this same memory type.
2031 VmaPoolCreateInfo poolCreateInfo = {};
2032 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2033 poolCreateInfo.blockSize = BLOCK_SIZE;
2034
2035 VmaPool hPool;
2036 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002037 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002038
2039 // 2. Allocate normal buffers from all the remaining memory.
2040 {
2041 VmaAllocationCreateInfo allocCreateInfo = {};
2042 allocCreateInfo.pool = hPool;
2043
2044 bufCreateInfo.size = BLOCK_SIZE / 2;
2045
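        // 200 MB heap limit / 20 MB blocks = 10 blocks total. The two dedicated allocations
        // above already use one block's worth (2 * BLOCK_SIZE / 2), so fill the remaining
        // 9 blocks with 18 buffers of half a block each.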
2046 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2047 for(size_t i = 0; i < bufCount; ++i)
2048 {
2049 Item item;
2050 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002051 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002052 items.push_back(item);
2053 }
2054 }
2055
2056 // 3. Allocation of one more (even small) buffer should fail.
2057 {
2058 VmaAllocationCreateInfo allocCreateInfo = {};
2059 allocCreateInfo.pool = hPool;
2060
2061 bufCreateInfo.size = 128;
2062
2063 VkBuffer hBuf;
2064 VmaAllocation hAlloc;
2065 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002066 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002067 }
2068
2069 // Destroy everything.
2070 for(size_t i = items.size(); i--; )
2071 {
2072 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2073 }
2074
2075 vmaDestroyPool(hAllocator, hPool);
2076
2077 vmaDestroyAllocator(hAllocator);
2078}
2079
Adam Sawicki212a4a62018-06-14 15:44:45 +02002080#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002081static void TestDebugMargin()
2082{
2083 if(VMA_DEBUG_MARGIN == 0)
2084 {
2085 return;
2086 }
2087
2088 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002089 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002090
2091 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002092 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002093
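    // With VMA_DEBUG_MARGIN > 0, a margin of at least that many bytes is expected before every
    // allocation, both at the beginning of a block and between neighboring allocations.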
2094 // Create few buffers of different size.
2095 const size_t BUF_COUNT = 10;
2096 BufferInfo buffers[BUF_COUNT];
2097 VmaAllocationInfo allocInfo[BUF_COUNT];
2098 for(size_t i = 0; i < BUF_COUNT; ++i)
2099 {
2100 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101 // Last one will be mapped.
2102 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002103
2104 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002106 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002107 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002108
2109 if(i == BUF_COUNT - 1)
2110 {
2111 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002112 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002113 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2114 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2115 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002116 }
2117
2118 // Check if their offsets preserve margin between them.
2119 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2120 {
2121 if(lhs.deviceMemory != rhs.deviceMemory)
2122 {
2123 return lhs.deviceMemory < rhs.deviceMemory;
2124 }
2125 return lhs.offset < rhs.offset;
2126 });
2127 for(size_t i = 1; i < BUF_COUNT; ++i)
2128 {
2129 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2130 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002131 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002132 }
2133 }
2134
Adam Sawicki212a4a62018-06-14 15:44:45 +02002135 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002136 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002137
Adam Sawicki73b16652018-06-11 16:39:25 +02002138 // Destroy all buffers.
2139 for(size_t i = BUF_COUNT; i--; )
2140 {
2141 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2142 }
2143}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002144#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002145
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002146static void TestLinearAllocator()
2147{
2148 wprintf(L"Test linear allocator\n");
2149
2150 RandomNumberGenerator rand{645332};
2151
2152 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2153 sampleBufCreateInfo.size = 1024; // Whatever.
2154 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2155
2156 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2157 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2158
2159 VmaPoolCreateInfo poolCreateInfo = {};
2160 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002161 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002162
Adam Sawickiee082772018-06-20 17:45:49 +02002163 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002164 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2165 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2166
2167 VmaPool pool = nullptr;
2168 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002169 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002170
2171 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2172
2173 VmaAllocationCreateInfo allocCreateInfo = {};
2174 allocCreateInfo.pool = pool;
2175
2176 constexpr size_t maxBufCount = 100;
2177 std::vector<BufferInfo> bufInfo;
2178
2179 constexpr VkDeviceSize bufSizeMin = 16;
2180 constexpr VkDeviceSize bufSizeMax = 1024;
2181 VmaAllocationInfo allocInfo;
2182 VkDeviceSize prevOffset = 0;
2183
2184 // Test one-time free.
2185 for(size_t i = 0; i < 2; ++i)
2186 {
2187 // Allocate a number of buffers of varying sizes that surely fit into this block.
2188 VkDeviceSize bufSumSize = 0;
2189 for(size_t i = 0; i < maxBufCount; ++i)
2190 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002191 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002192 BufferInfo newBufInfo;
2193 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2194 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002195 TEST(res == VK_SUCCESS);
2196 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002197 bufInfo.push_back(newBufInfo);
2198 prevOffset = allocInfo.offset;
2199 bufSumSize += bufCreateInfo.size;
2200 }
2201
2202 // Validate pool stats.
2203 VmaPoolStats stats;
2204 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002205 TEST(stats.size == poolCreateInfo.blockSize);
2206 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2207 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002208
2209 // Destroy the buffers in random order.
2210 while(!bufInfo.empty())
2211 {
2212 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2213 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2214 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2215 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2216 }
2217 }
2218
2219 // Test stack.
2220 {
2221 // Allocate a number of buffers of varying sizes that surely fit into this block.
2222 for(size_t i = 0; i < maxBufCount; ++i)
2223 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002224 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002225 BufferInfo newBufInfo;
2226 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2227 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002228 TEST(res == VK_SUCCESS);
2229 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002230 bufInfo.push_back(newBufInfo);
2231 prevOffset = allocInfo.offset;
2232 }
2233
2234 // Destroy a few buffers from the top of the stack.
2235 for(size_t i = 0; i < maxBufCount / 5; ++i)
2236 {
2237 const BufferInfo& currBufInfo = bufInfo.back();
2238 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2239 bufInfo.pop_back();
2240 }
2241
2242 // Create some more
2243 for(size_t i = 0; i < maxBufCount / 5; ++i)
2244 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002245 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002246 BufferInfo newBufInfo;
2247 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2248 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002249 TEST(res == VK_SUCCESS);
2250 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002251 bufInfo.push_back(newBufInfo);
2252 prevOffset = allocInfo.offset;
2253 }
2254
2255 // Destroy the buffers in reverse order.
2256 while(!bufInfo.empty())
2257 {
2258 const BufferInfo& currBufInfo = bufInfo.back();
2259 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2260 bufInfo.pop_back();
2261 }
2262 }
2263
Adam Sawickiee082772018-06-20 17:45:49 +02002264 // Test ring buffer.
2265 {
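        // A linear pool used as a ring buffer: when allocations are freed in the same order
        // they were created (FIFO), new allocations can keep wrapping around within one block.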
2266 // Allocate a number of buffers that surely fit into this block.
2267 bufCreateInfo.size = bufSizeMax;
2268 for(size_t i = 0; i < maxBufCount; ++i)
2269 {
2270 BufferInfo newBufInfo;
2271 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2272 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002273 TEST(res == VK_SUCCESS);
2274 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002275 bufInfo.push_back(newBufInfo);
2276 prevOffset = allocInfo.offset;
2277 }
2278
2279 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2280 const size_t buffersPerIter = maxBufCount / 10 - 1;
2281 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2282 for(size_t iter = 0; iter < iterCount; ++iter)
2283 {
2284 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2285 {
2286 const BufferInfo& currBufInfo = bufInfo.front();
2287 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2288 bufInfo.erase(bufInfo.begin());
2289 }
2290 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2291 {
2292 BufferInfo newBufInfo;
2293 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2294 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002295 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002296 bufInfo.push_back(newBufInfo);
2297 }
2298 }
2299
2300 // Allocate buffers until we reach out-of-memory.
2301 uint32_t debugIndex = 0;
2302 while(res == VK_SUCCESS)
2303 {
2304 BufferInfo newBufInfo;
2305 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2306 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2307 if(res == VK_SUCCESS)
2308 {
2309 bufInfo.push_back(newBufInfo);
2310 }
2311 else
2312 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002313 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002314 }
2315 ++debugIndex;
2316 }
2317
2318 // Destroy the buffers in random order.
2319 while(!bufInfo.empty())
2320 {
2321 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2322 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2323 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2324 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2325 }
2326 }
2327
Adam Sawicki680b2252018-08-22 14:47:32 +02002328 // Test double stack.
2329 {
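        // VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT places allocations at the end of the block,
        // growing downwards, while default allocations grow upwards from the beginning.
        // The two stacks must never overlap, which the offset checks below verify.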
2330 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2331 VkDeviceSize prevOffsetLower = 0;
2332 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2333 for(size_t i = 0; i < maxBufCount; ++i)
2334 {
2335 const bool upperAddress = (i % 2) != 0;
2336 if(upperAddress)
2337 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2338 else
2339 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002340 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 BufferInfo newBufInfo;
2342 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2343 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002344 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002345 if(upperAddress)
2346 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002347 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002348 prevOffsetUpper = allocInfo.offset;
2349 }
2350 else
2351 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002352 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002353 prevOffsetLower = allocInfo.offset;
2354 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002355 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002356 bufInfo.push_back(newBufInfo);
2357 }
2358
2359 // Destroy a few buffers from the top of the stack.
2360 for(size_t i = 0; i < maxBufCount / 5; ++i)
2361 {
2362 const BufferInfo& currBufInfo = bufInfo.back();
2363 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2364 bufInfo.pop_back();
2365 }
2366
2367 // Create some more
2368 for(size_t i = 0; i < maxBufCount / 5; ++i)
2369 {
2370 const bool upperAddress = (i % 2) != 0;
2371 if(upperAddress)
2372 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2373 else
2374 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002375 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002376 BufferInfo newBufInfo;
2377 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2378 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002379 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002380 bufInfo.push_back(newBufInfo);
2381 }
2382
2383 // Destroy the buffers in reverse order.
2384 while(!bufInfo.empty())
2385 {
2386 const BufferInfo& currBufInfo = bufInfo.back();
2387 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2388 bufInfo.pop_back();
2389 }
2390
2391 // Create buffers on both sides until we reach out of memory.
2392 prevOffsetLower = 0;
2393 prevOffsetUpper = poolCreateInfo.blockSize;
2394 res = VK_SUCCESS;
2395 for(size_t i = 0; res == VK_SUCCESS; ++i)
2396 {
2397 const bool upperAddress = (i % 2) != 0;
2398 if(upperAddress)
2399 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2400 else
2401 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002402 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002403 BufferInfo newBufInfo;
2404 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2405 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2406 if(res == VK_SUCCESS)
2407 {
2408 if(upperAddress)
2409 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002410 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002411 prevOffsetUpper = allocInfo.offset;
2412 }
2413 else
2414 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002415 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002416 prevOffsetLower = allocInfo.offset;
2417 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002418 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002419 bufInfo.push_back(newBufInfo);
2420 }
2421 }
2422
2423 // Destroy the buffers in random order.
2424 while(!bufInfo.empty())
2425 {
2426 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2427 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2428 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2429 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2430 }
2431
2432 // Create buffers on upper side only, constant size, until we reach out of memory.
2433 prevOffsetUpper = poolCreateInfo.blockSize;
2434 res = VK_SUCCESS;
2435 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2436 bufCreateInfo.size = bufSizeMax;
2437 for(size_t i = 0; res == VK_SUCCESS; ++i)
2438 {
2439 BufferInfo newBufInfo;
2440 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2441 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2442 if(res == VK_SUCCESS)
2443 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002444 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002445 prevOffsetUpper = allocInfo.offset;
2446 bufInfo.push_back(newBufInfo);
2447 }
2448 }
2449
2450 // Destroy the buffers in reverse order.
2451 while(!bufInfo.empty())
2452 {
2453 const BufferInfo& currBufInfo = bufInfo.back();
2454 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2455 bufInfo.pop_back();
2456 }
2457 }
2458
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002459 // Test ring buffer with lost allocations.
2460 {
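        // Allocations created with CAN_BECOME_LOST may be taken over by later allocations that
        // use CAN_MAKE_OTHER_LOST, once the frame index advances via vmaSetCurrentFrameIndex.
        // A lost allocation then reports deviceMemory == VK_NULL_HANDLE.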
2461 // Allocate a number of buffers until the pool is full.
2462 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2463 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2464 res = VK_SUCCESS;
2465 for(size_t i = 0; res == VK_SUCCESS; ++i)
2466 {
2467 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2468
Adam Sawickifd366b62019-01-24 15:26:43 +01002469 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002470
2471 BufferInfo newBufInfo;
2472 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2473 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2474 if(res == VK_SUCCESS)
2475 bufInfo.push_back(newBufInfo);
2476 }
2477
2478 // Free first half of it.
2479 {
2480 const size_t buffersToDelete = bufInfo.size() / 2;
2481 for(size_t i = 0; i < buffersToDelete; ++i)
2482 {
2483 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2484 }
2485 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2486 }
2487
2488 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002489 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002490 res = VK_SUCCESS;
2491 for(size_t i = 0; res == VK_SUCCESS; ++i)
2492 {
2493 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2494
Adam Sawickifd366b62019-01-24 15:26:43 +01002495 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002496
2497 BufferInfo newBufInfo;
2498 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2499 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2500 if(res == VK_SUCCESS)
2501 bufInfo.push_back(newBufInfo);
2502 }
2503
2504 VkDeviceSize firstNewOffset;
2505 {
2506 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2507
2508 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2509 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2510 bufCreateInfo.size = bufSizeMax;
2511
2512 BufferInfo newBufInfo;
2513 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2514 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002515 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002516 bufInfo.push_back(newBufInfo);
2517 firstNewOffset = allocInfo.offset;
2518
2519 // Make sure at least one buffer from the beginning became lost.
2520 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002521 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002522 }
2523
Adam Sawickifd366b62019-01-24 15:26:43 +01002524#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002525 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2526 size_t newCount = 1;
2527 for(;;)
2528 {
2529 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2530
Adam Sawickifd366b62019-01-24 15:26:43 +01002531 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002532
2533 BufferInfo newBufInfo;
2534 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2535 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002536
Adam Sawickib8d34d52018-10-03 17:41:20 +02002537 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002538 bufInfo.push_back(newBufInfo);
2539 ++newCount;
2540 if(allocInfo.offset < firstNewOffset)
2541 break;
2542 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002543#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002544
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002545 // Delete buffers that are lost.
2546 for(size_t i = bufInfo.size(); i--; )
2547 {
2548 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2549 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2550 {
2551 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2552 bufInfo.erase(bufInfo.begin() + i);
2553 }
2554 }
2555
2556 // Test vmaMakePoolAllocationsLost
2557 {
2558 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2559
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002560 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002561 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002562 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002563
2564 size_t realLostAllocCount = 0;
2565 for(size_t i = 0; i < bufInfo.size(); ++i)
2566 {
2567 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2568 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2569 ++realLostAllocCount;
2570 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002571 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002572 }
2573
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002574 // Destroy all the buffers in forward order.
2575 for(size_t i = 0; i < bufInfo.size(); ++i)
2576 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2577 bufInfo.clear();
2578 }
2579
Adam Sawicki70a683e2018-08-24 15:36:32 +02002580 vmaDestroyPool(g_hAllocator, pool);
2581}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002582
Adam Sawicki70a683e2018-08-24 15:36:32 +02002583static void TestLinearAllocatorMultiBlock()
2584{
2585 wprintf(L"Test linear allocator multi block\n");
2586
2587 RandomNumberGenerator rand{345673};
2588
2589 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2590 sampleBufCreateInfo.size = 1024 * 1024;
2591 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2592
2593 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2594 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2595
2596 VmaPoolCreateInfo poolCreateInfo = {};
2597 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2598 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002599 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002600
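    // Unlike TestLinearAllocator above, this pool does not set minBlockCount/maxBlockCount,
    // so the linear pool is allowed to create additional blocks and free empty ones.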
2601 VmaPool pool = nullptr;
2602 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002603 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002604
2605 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2606
2607 VmaAllocationCreateInfo allocCreateInfo = {};
2608 allocCreateInfo.pool = pool;
2609
2610 std::vector<BufferInfo> bufInfo;
2611 VmaAllocationInfo allocInfo;
2612
2613 // Test one-time free.
2614 {
2615 // Allocate buffers until we move to a second block.
2616 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2617 for(uint32_t i = 0; ; ++i)
2618 {
2619 BufferInfo newBufInfo;
2620 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2621 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002622 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002623 bufInfo.push_back(newBufInfo);
2624 if(lastMem && allocInfo.deviceMemory != lastMem)
2625 {
2626 break;
2627 }
2628 lastMem = allocInfo.deviceMemory;
2629 }
2630
Adam Sawickib8d34d52018-10-03 17:41:20 +02002631 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002632
2633 // Make sure that the pool now has two blocks.
2634 VmaPoolStats poolStats = {};
2635 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002636 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002637
2638 // Destroy all the buffers in random order.
2639 while(!bufInfo.empty())
2640 {
2641 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2642 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2643 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2644 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2645 }
2646
2647 // Make sure that the pool now has at most one block.
2648 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002649 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002650 }
2651
2652 // Test stack.
2653 {
2654 // Allocate buffers until we move to a second block.
2655 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2656 for(uint32_t i = 0; ; ++i)
2657 {
2658 BufferInfo newBufInfo;
2659 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2660 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002661 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002662 bufInfo.push_back(newBufInfo);
2663 if(lastMem && allocInfo.deviceMemory != lastMem)
2664 {
2665 break;
2666 }
2667 lastMem = allocInfo.deviceMemory;
2668 }
2669
Adam Sawickib8d34d52018-10-03 17:41:20 +02002670 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002671
2672 // Add a few more buffers.
2673 for(uint32_t i = 0; i < 5; ++i)
2674 {
2675 BufferInfo newBufInfo;
2676 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2677 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002678 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002679 bufInfo.push_back(newBufInfo);
2680 }
2681
2682 // Make sure that the pool now has two blocks.
2683 VmaPoolStats poolStats = {};
2684 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002685 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002686
2687 // Delete half of buffers, LIFO.
2688 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2689 {
2690 const BufferInfo& currBufInfo = bufInfo.back();
2691 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2692 bufInfo.pop_back();
2693 }
2694
2695 // Add one more buffer.
2696 BufferInfo newBufInfo;
2697 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2698 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002699 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002700 bufInfo.push_back(newBufInfo);
2701
2702 // Make sure that the pool now has one block.
2703 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002704 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002705
2706 // Delete all the remaining buffers, LIFO.
2707 while(!bufInfo.empty())
2708 {
2709 const BufferInfo& currBufInfo = bufInfo.back();
2710 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2711 bufInfo.pop_back();
2712 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002713 }
2714
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002715 vmaDestroyPool(g_hAllocator, pool);
2716}
2717
Adam Sawickifd11d752018-08-22 15:02:10 +02002718static void ManuallyTestLinearAllocator()
2719{
2720 VmaStats origStats;
2721 vmaCalculateStats(g_hAllocator, &origStats);
2722
2723 wprintf(L"Manually test linear allocator\n");
2724
2725 RandomNumberGenerator rand{645332};
2726
2727 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2728 sampleBufCreateInfo.size = 1024; // Whatever.
2729 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2730
2731 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2732 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2733
2734 VmaPoolCreateInfo poolCreateInfo = {};
2735 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002736 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002737
2738 poolCreateInfo.blockSize = 10 * 1024;
2739 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2740 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2741
2742 VmaPool pool = nullptr;
2743 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002744 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002745
2746 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2747
2748 VmaAllocationCreateInfo allocCreateInfo = {};
2749 allocCreateInfo.pool = pool;
2750
2751 std::vector<BufferInfo> bufInfo;
2752 VmaAllocationInfo allocInfo;
2753 BufferInfo newBufInfo;
2754
2755 // Test double stack.
2756 {
2757 /*
2758 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2759 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2760
2761 In total:
2762 1 block allocated
2763 10240 Vulkan bytes
2764 6 new allocations
2765 2256 bytes in allocations
2766 */
2767
2768 bufCreateInfo.size = 32;
2769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2770 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002771 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002772 bufInfo.push_back(newBufInfo);
2773
2774 bufCreateInfo.size = 1024;
2775 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2776 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002777 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002778 bufInfo.push_back(newBufInfo);
2779
2780 bufCreateInfo.size = 32;
2781 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2782 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002783 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002784 bufInfo.push_back(newBufInfo);
2785
2786 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2787
2788 bufCreateInfo.size = 128;
2789 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2790 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002791 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002792 bufInfo.push_back(newBufInfo);
2793
2794 bufCreateInfo.size = 1024;
2795 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2796 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002797 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002798 bufInfo.push_back(newBufInfo);
2799
2800 bufCreateInfo.size = 16;
2801 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2802 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002803 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002804 bufInfo.push_back(newBufInfo);
2805
2806 VmaStats currStats;
2807 vmaCalculateStats(g_hAllocator, &currStats);
2808 VmaPoolStats poolStats;
2809 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2810
2811 char* statsStr = nullptr;
2812 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2813
2814 // PUT BREAKPOINT HERE TO CHECK.
2815 // Inspect: currStats versus origStats, poolStats, statsStr.
2816 int I = 0;
2817
2818 vmaFreeStatsString(g_hAllocator, statsStr);
2819
2820 // Destroy the buffers in reverse order.
2821 while(!bufInfo.empty())
2822 {
2823 const BufferInfo& currBufInfo = bufInfo.back();
2824 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2825 bufInfo.pop_back();
2826 }
2827 }
2828
2829 vmaDestroyPool(g_hAllocator, pool);
2830}
2831
Adam Sawicki80927152018-09-07 17:27:23 +02002832static void BenchmarkAlgorithmsCase(FILE* file,
2833 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002834 bool empty,
2835 VmaAllocationCreateFlags allocStrategy,
2836 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002837{
2838 RandomNumberGenerator rand{16223};
2839
2840 const VkDeviceSize bufSizeMin = 32;
2841 const VkDeviceSize bufSizeMax = 1024;
2842 const size_t maxBufCapacity = 10000;
2843 const uint32_t iterationCount = 10;
2844
2845 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2846 sampleBufCreateInfo.size = bufSizeMax;
2847 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2848
2849 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2850 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2851
2852 VmaPoolCreateInfo poolCreateInfo = {};
2853 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002854 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002855
2856 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002857 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002858 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2859
2860 VmaPool pool = nullptr;
2861 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002862 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002863
2864 // Buffer created just to get memory requirements. Never bound to any memory.
2865 VkBuffer dummyBuffer = VK_NULL_HANDLE;
Adam Sawicki1f84f622019-07-02 13:40:01 +02002866 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002867 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002868
2869 VkMemoryRequirements memReq = {};
2870 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2871
Adam Sawicki1f84f622019-07-02 13:40:01 +02002872 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawicki0a607132018-08-24 11:18:41 +02002873
2874 VmaAllocationCreateInfo allocCreateInfo = {};
2875 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002876 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002877
2878 VmaAllocation alloc;
2879 std::vector<VmaAllocation> baseAllocations;
2880
2881 if(!empty)
2882 {
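        // Pre-fragment the pool: fill about 1/3 of it, then free half of those allocations at
        // random, so the benchmark below allocates into an already fragmented block.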
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002883 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002884 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002885 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002886 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002887            // This test intentionally allows sizes that are not aligned to 4 or 16 bytes.
2888 // This is theoretically allowed and already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002889 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2890 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002891 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002892 baseAllocations.push_back(alloc);
2893 totalSize += memReq.size;
2894 }
2895
2896 // Delete half of them, choose randomly.
2897        // Delete half of them, chosen at random.
2898 for(size_t i = 0; i < allocsToDelete; ++i)
2899 {
2900 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2901 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2902 baseAllocations.erase(baseAllocations.begin() + index);
2903 }
2904 }
2905
2906 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002907 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002908 std::vector<VmaAllocation> testAllocations;
2909 testAllocations.reserve(allocCount);
2910 duration allocTotalDuration = duration::zero();
2911 duration freeTotalDuration = duration::zero();
2912 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2913 {
2914 // Allocations
2915 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2916 for(size_t i = 0; i < allocCount; ++i)
2917 {
2918 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2919 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002920 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002921 testAllocations.push_back(alloc);
2922 }
2923 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2924
2925 // Deallocations
2926 switch(freeOrder)
2927 {
2928 case FREE_ORDER::FORWARD:
2929 // Leave testAllocations unchanged.
2930 break;
2931 case FREE_ORDER::BACKWARD:
2932 std::reverse(testAllocations.begin(), testAllocations.end());
2933 break;
2934 case FREE_ORDER::RANDOM:
2935 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2936 break;
2937 default: assert(0);
2938 }
2939
2940 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2941 for(size_t i = 0; i < allocCount; ++i)
2942 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2943 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2944
2945 testAllocations.clear();
2946 }
2947
2948 // Delete baseAllocations
2949 while(!baseAllocations.empty())
2950 {
2951 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2952 baseAllocations.pop_back();
2953 }
2954
2955 vmaDestroyPool(g_hAllocator, pool);
2956
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002957 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2958 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2959
Adam Sawicki80927152018-09-07 17:27:23 +02002960 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2961 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002962 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002963 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002964 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002965 allocTotalSeconds,
2966 freeTotalSeconds);
2967
2968 if(file)
2969 {
2970 std::string currTime;
2971 CurrentTimeToStr(currTime);
2972
Adam Sawicki80927152018-09-07 17:27:23 +02002973 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002974 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002975 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002976 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002977 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002978 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2979 allocTotalSeconds,
2980 freeTotalSeconds);
2981 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002982}
2983
Adam Sawicki80927152018-09-07 17:27:23 +02002984static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002985{
Adam Sawicki80927152018-09-07 17:27:23 +02002986 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002987
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002988 if(file)
2989 {
2990 fprintf(file,
2991 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002992 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002993 "Allocation time (s),Deallocation time (s)\n");
2994 }
2995
Adam Sawicki0a607132018-08-24 11:18:41 +02002996 uint32_t freeOrderCount = 1;
2997 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2998 freeOrderCount = 3;
2999 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3000 freeOrderCount = 2;
3001
3002 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003003 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003004
3005 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3006 {
3007 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3008 switch(freeOrderIndex)
3009 {
3010 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3011 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3012 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3013 default: assert(0);
3014 }
3015
3016 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3017 {
Adam Sawicki80927152018-09-07 17:27:23 +02003018 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003019 {
Adam Sawicki80927152018-09-07 17:27:23 +02003020 uint32_t algorithm = 0;
3021 switch(algorithmIndex)
3022 {
3023 case 0:
3024 break;
3025 case 1:
3026 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3027 break;
3028 case 2:
3029 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3030 break;
3031 default:
3032 assert(0);
3033 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003034
Adam Sawicki80927152018-09-07 17:27:23 +02003035 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003036 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3037 {
3038 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003039 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003040 {
3041 switch(allocStrategyIndex)
3042 {
3043 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3044 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3045 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3046 default: assert(0);
3047 }
3048 }
3049
Adam Sawicki80927152018-09-07 17:27:23 +02003050 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003051 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003052 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003053 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003054 strategy,
3055 freeOrder); // freeOrder
3056 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003057 }
3058 }
3059 }
3060}
3061
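// Tests a custom pool with many equally sized buffers: filling the pool to capacity, lost
// allocations across frame indices, pool statistics, defragmentation, and vmaMakePoolAllocationsLost.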
Adam Sawickib8333fb2018-03-13 16:15:53 +01003062static void TestPool_SameSize()
3063{
3064 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3065 const size_t BUF_COUNT = 100;
3066 VkResult res;
3067
3068 RandomNumberGenerator rand{123};
3069
3070 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3071 bufferInfo.size = BUF_SIZE;
3072 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3073
3074 uint32_t memoryTypeBits = UINT32_MAX;
3075 {
3076 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003077 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003078 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003079
3080 VkMemoryRequirements memReq;
3081 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3082 memoryTypeBits = memReq.memoryTypeBits;
3083
Adam Sawicki1f84f622019-07-02 13:40:01 +02003084 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003085 }
3086
3087 VmaAllocationCreateInfo poolAllocInfo = {};
3088 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3089 uint32_t memTypeIndex;
3090 res = vmaFindMemoryTypeIndex(
3091 g_hAllocator,
3092 memoryTypeBits,
3093 &poolAllocInfo,
3094 &memTypeIndex);
3095
3096 VmaPoolCreateInfo poolCreateInfo = {};
3097 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3098 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3099 poolCreateInfo.minBlockCount = 1;
3100 poolCreateInfo.maxBlockCount = 4;
3101 poolCreateInfo.frameInUseCount = 0;
3102
3103 VmaPool pool;
3104 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003105 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003106
Adam Sawickia020fb82019-11-02 14:43:06 +01003107 // Test pool name
3108 {
3109 static const char* const POOL_NAME = "Pool name";
3110 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3111
3112 const char* fetchedPoolName = nullptr;
3113 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3114 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
3115
Adam Sawickia020fb82019-11-02 14:43:06 +01003116 vmaSetPoolName(g_hAllocator, pool, nullptr);
3117 }
3118
Adam Sawickib8333fb2018-03-13 16:15:53 +01003119 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3120
3121 VmaAllocationCreateInfo allocInfo = {};
3122 allocInfo.pool = pool;
3123 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3124 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3125
3126 struct BufItem
3127 {
3128 VkBuffer Buf;
3129 VmaAllocation Alloc;
3130 };
3131 std::vector<BufItem> items;
3132
3133 // Fill entire pool.
3134 for(size_t i = 0; i < BUF_COUNT; ++i)
3135 {
3136 BufItem item;
3137 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003138 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003139 items.push_back(item);
3140 }
3141
3142 // Make sure that another allocation would fail.
3143 {
3144 BufItem item;
3145 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003146 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003147 }
3148
3149 // Validate that no buffer is lost. Also check that they are not mapped.
3150 for(size_t i = 0; i < items.size(); ++i)
3151 {
3152 VmaAllocationInfo allocInfo;
3153 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003154 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3155 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003156 }
3157
3158 // Free some percent of random items.
3159 {
3160 const size_t PERCENT_TO_FREE = 10;
3161 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3162 for(size_t i = 0; i < itemsToFree; ++i)
3163 {
3164 size_t index = (size_t)rand.Generate() % items.size();
3165 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3166 items.erase(items.begin() + index);
3167 }
3168 }
3169
3170 // Randomly allocate and free items.
3171 {
3172 const size_t OPERATION_COUNT = BUF_COUNT;
3173 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3174 {
3175 bool allocate = rand.Generate() % 2 != 0;
3176 if(allocate)
3177 {
3178 if(items.size() < BUF_COUNT)
3179 {
3180 BufItem item;
3181 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003182 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003183 items.push_back(item);
3184 }
3185 }
3186 else // Free
3187 {
3188 if(!items.empty())
3189 {
3190 size_t index = (size_t)rand.Generate() % items.size();
3191 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3192 items.erase(items.begin() + index);
3193 }
3194 }
3195 }
3196 }
3197
3198 // Allocate up to maximum.
3199 while(items.size() < BUF_COUNT)
3200 {
3201 BufItem item;
3202 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003203 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003204 items.push_back(item);
3205 }
3206
3207 // Validate that no buffer is lost.
3208 for(size_t i = 0; i < items.size(); ++i)
3209 {
3210 VmaAllocationInfo allocInfo;
3211 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003212 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003213 }
3214
3215 // Next frame.
3216 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3217
3218 // Allocate another BUF_COUNT buffers.
3219 for(size_t i = 0; i < BUF_COUNT; ++i)
3220 {
3221 BufItem item;
3222 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003223 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003224 items.push_back(item);
3225 }
3226
3227    // Make sure the first BUF_COUNT buffers are lost. Delete them.
3228 for(size_t i = 0; i < BUF_COUNT; ++i)
3229 {
3230 VmaAllocationInfo allocInfo;
3231 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003232 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003233 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3234 }
3235 items.erase(items.begin(), items.begin() + BUF_COUNT);
3236
3237 // Validate that no buffer is lost.
3238 for(size_t i = 0; i < items.size(); ++i)
3239 {
3240 VmaAllocationInfo allocInfo;
3241 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003242 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003243 }
3244
3245 // Free one item.
3246 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3247 items.pop_back();
3248
3249 // Validate statistics.
3250 {
3251 VmaPoolStats poolStats = {};
3252 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003253 TEST(poolStats.allocationCount == items.size());
3254        TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3255 TEST(poolStats.unusedRangeCount == 1);
3256 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3257 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003258 }
3259
3260 // Free all remaining items.
3261 for(size_t i = items.size(); i--; )
3262 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3263 items.clear();
3264
3265 // Allocate maximum items again.
3266 for(size_t i = 0; i < BUF_COUNT; ++i)
3267 {
3268 BufItem item;
3269 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003270 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003271 items.push_back(item);
3272 }
3273
3274 // Delete every other item.
3275 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3276 {
3277 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3278 items.erase(items.begin() + i);
3279 }
3280
3281 // Defragment!
3282 {
3283 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3284 for(size_t i = 0; i < items.size(); ++i)
3285 allocationsToDefragment[i] = items[i].Alloc;
3286
3287 VmaDefragmentationStats defragmentationStats;
3288 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003289 TEST(res == VK_SUCCESS);
3290 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003291 }
3292
3293 // Free all remaining items.
3294 for(size_t i = items.size(); i--; )
3295 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3296 items.clear();
3297
3298 ////////////////////////////////////////////////////////////////////////////////
3299 // Test for vmaMakePoolAllocationsLost
3300
3301 // Allocate 4 buffers on frame 10.
3302 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3303 for(size_t i = 0; i < 4; ++i)
3304 {
3305 BufItem item;
3306 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003307 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003308 items.push_back(item);
3309 }
3310
3311 // Touch first 2 of them on frame 11.
3312 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3313 for(size_t i = 0; i < 2; ++i)
3314 {
3315 VmaAllocationInfo allocInfo;
3316 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3317 }
3318
3319 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3320 size_t lostCount = 0xDEADC0DE;
3321 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003322 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003323
3324 // Make another call. Now 0 should be lost.
3325 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003326 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003327
3328 // Make another call, with null count. Should not crash.
3329 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3330
3331 // END: Free all remaining items.
3332 for(size_t i = items.size(); i--; )
3333 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3334
3335 items.clear();
3336
Adam Sawickid2924172018-06-11 12:48:46 +02003337 ////////////////////////////////////////////////////////////////////////////////
3338 // Test for allocation too large for pool
3339
3340 {
3341 VmaAllocationCreateInfo allocCreateInfo = {};
3342 allocCreateInfo.pool = pool;
3343
3344 VkMemoryRequirements memReq;
3345 memReq.memoryTypeBits = UINT32_MAX;
3346 memReq.alignment = 1;
3347 memReq.size = poolCreateInfo.blockSize + 4;
3348
3349 VmaAllocation alloc = nullptr;
3350 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003351 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003352 }
3353
Adam Sawickib8333fb2018-03-13 16:15:53 +01003354 vmaDestroyPool(g_hAllocator, pool);
3355}
3356
Adam Sawickie44c6262018-06-15 14:30:39 +02003357static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3358{
3359 const uint8_t* pBytes = (const uint8_t*)pMemory;
3360 for(size_t i = 0; i < size; ++i)
3361 {
3362 if(pBytes[i] != pattern)
3363 {
3364 return false;
3365 }
3366 }
3367 return true;
3368}
3369
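// Checks that allocations are filled with pattern 0xDC on creation and 0xEF on destruction.
// This behavior is only active when the library is compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled.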
3370static void TestAllocationsInitialization()
3371{
3372 VkResult res;
3373
3374 const size_t BUF_SIZE = 1024;
3375
3376 // Create pool.
3377
3378 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3379 bufInfo.size = BUF_SIZE;
3380 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3381
3382 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3383 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3384
3385 VmaPoolCreateInfo poolCreateInfo = {};
3386 poolCreateInfo.blockSize = BUF_SIZE * 10;
3387 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3388 poolCreateInfo.maxBlockCount = 1;
3389 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003390 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003391
3392 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3393 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003394 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003395
3396 // Create one persistently mapped buffer to keep memory of this block mapped,
3397 // so that pointer to mapped data will remain (more or less...) valid even
3398 // after destruction of other allocations.
3399
3400 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3401 VkBuffer firstBuf;
3402 VmaAllocation firstAlloc;
3403 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003404 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003405
3406 // Test buffers.
3407
3408 for(uint32_t i = 0; i < 2; ++i)
3409 {
3410 const bool persistentlyMapped = i == 0;
3411 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3412 VkBuffer buf;
3413 VmaAllocation alloc;
3414 VmaAllocationInfo allocInfo;
3415 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003416 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003417
3418 void* pMappedData;
3419 if(!persistentlyMapped)
3420 {
3421 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003422 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003423 }
3424 else
3425 {
3426 pMappedData = allocInfo.pMappedData;
3427 }
3428
3429 // Validate initialized content
3430 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003431 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003432
3433 if(!persistentlyMapped)
3434 {
3435 vmaUnmapMemory(g_hAllocator, alloc);
3436 }
3437
3438 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3439
3440 // Validate freed content
3441 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003442 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003443 }
3444
3445 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3446 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3447}
3448
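// Multithreaded benchmark of a custom pool: each thread creates buffers and images of randomized
// sizes as CAN_BECOME_LOST allocations, touches a varying working set every frame, recreates lost
// ones, and the per-thread allocation/deallocation timings are aggregated into outResult.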
Adam Sawickib8333fb2018-03-13 16:15:53 +01003449static void TestPool_Benchmark(
3450 PoolTestResult& outResult,
3451 const PoolTestConfig& config)
3452{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003453 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003454
3455 RandomNumberGenerator mainRand{config.RandSeed};
3456
3457 uint32_t allocationSizeProbabilitySum = std::accumulate(
3458 config.AllocationSizes.begin(),
3459 config.AllocationSizes.end(),
3460 0u,
3461 [](uint32_t sum, const AllocationSize& allocSize) {
3462 return sum + allocSize.Probability;
3463 });
3464
3465 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3466 bufferInfo.size = 256; // Whatever.
3467 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3468
3469 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3470 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3471 imageInfo.extent.width = 256; // Whatever.
3472 imageInfo.extent.height = 256; // Whatever.
3473 imageInfo.extent.depth = 1;
3474 imageInfo.mipLevels = 1;
3475 imageInfo.arrayLayers = 1;
3476 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3477 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3478 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3479 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3480 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3481
3482 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3483 {
3484 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003485 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003486 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003487
3488 VkMemoryRequirements memReq;
3489 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3490 bufferMemoryTypeBits = memReq.memoryTypeBits;
3491
Adam Sawicki1f84f622019-07-02 13:40:01 +02003492 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003493 }
3494
3495 uint32_t imageMemoryTypeBits = UINT32_MAX;
3496 {
3497 VkImage dummyImage;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003498 VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003499 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003500
3501 VkMemoryRequirements memReq;
3502 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3503 imageMemoryTypeBits = memReq.memoryTypeBits;
3504
Adam Sawicki1f84f622019-07-02 13:40:01 +02003505 vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003506 }
3507
3508 uint32_t memoryTypeBits = 0;
3509 if(config.UsesBuffers() && config.UsesImages())
3510 {
3511 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3512 if(memoryTypeBits == 0)
3513 {
3514 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3515 return;
3516 }
3517 }
3518 else if(config.UsesBuffers())
3519 memoryTypeBits = bufferMemoryTypeBits;
3520 else if(config.UsesImages())
3521 memoryTypeBits = imageMemoryTypeBits;
3522 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003523 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003524
3525 VmaPoolCreateInfo poolCreateInfo = {};
3526 poolCreateInfo.memoryTypeIndex = 0;
3527 poolCreateInfo.minBlockCount = 1;
3528 poolCreateInfo.maxBlockCount = 1;
3529 poolCreateInfo.blockSize = config.PoolSize;
3530 poolCreateInfo.frameInUseCount = 1;
3531
3532 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3533 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3534 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3535
3536 VmaPool pool;
3537 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003538 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003539
3540 // Start time measurement - after creating pool and initializing data structures.
3541 time_point timeBeg = std::chrono::high_resolution_clock::now();
3542
3543 ////////////////////////////////////////////////////////////////////////////////
3544 // ThreadProc
3545 auto ThreadProc = [&](
3546 PoolTestThreadResult* outThreadResult,
3547 uint32_t randSeed,
3548 HANDLE frameStartEvent,
3549 HANDLE frameEndEvent) -> void
3550 {
3551 RandomNumberGenerator threadRand{randSeed};
3552
3553 outThreadResult->AllocationTimeMin = duration::max();
3554 outThreadResult->AllocationTimeSum = duration::zero();
3555 outThreadResult->AllocationTimeMax = duration::min();
3556 outThreadResult->DeallocationTimeMin = duration::max();
3557 outThreadResult->DeallocationTimeSum = duration::zero();
3558 outThreadResult->DeallocationTimeMax = duration::min();
3559 outThreadResult->AllocationCount = 0;
3560 outThreadResult->DeallocationCount = 0;
3561 outThreadResult->LostAllocationCount = 0;
3562 outThreadResult->LostAllocationTotalSize = 0;
3563 outThreadResult->FailedAllocationCount = 0;
3564 outThreadResult->FailedAllocationTotalSize = 0;
3565
3566 struct Item
3567 {
3568 VkDeviceSize BufferSize;
3569 VkExtent2D ImageSize;
3570 VkBuffer Buf;
3571 VkImage Image;
3572 VmaAllocation Alloc;
3573
3574 VkDeviceSize CalcSizeBytes() const
3575 {
3576 return BufferSize +
3577 ImageSize.width * ImageSize.height * 4;
3578 }
3579 };
3580 std::vector<Item> unusedItems, usedItems;
3581
3582 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3583
3584 // Create all items - all unused, not yet allocated.
3585 for(size_t i = 0; i < threadTotalItemCount; ++i)
3586 {
3587 Item item = {};
3588
3589 uint32_t allocSizeIndex = 0;
3590 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3591 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3592 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3593
3594 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3595 if(allocSize.BufferSizeMax > 0)
3596 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003597 TEST(allocSize.BufferSizeMin > 0);
3598 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003599 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3600 item.BufferSize = allocSize.BufferSizeMin;
3601 else
3602 {
3603 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3604 item.BufferSize = item.BufferSize / 16 * 16;
3605 }
3606 }
3607 else
3608 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003609 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003610 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3611 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3612 else
3613 {
3614 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3615 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3616 }
3617 }
3618
3619 unusedItems.push_back(item);
3620 }
3621
3622 auto Allocate = [&](Item& item) -> VkResult
3623 {
3624 VmaAllocationCreateInfo allocCreateInfo = {};
3625 allocCreateInfo.pool = pool;
3626 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3627 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3628
3629 if(item.BufferSize)
3630 {
3631 bufferInfo.size = item.BufferSize;
3632 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3633 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3634 }
3635 else
3636 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003637 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003638
3639 imageInfo.extent.width = item.ImageSize.width;
3640 imageInfo.extent.height = item.ImageSize.height;
3641 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3642 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3643 }
3644 };
3645
3646 ////////////////////////////////////////////////////////////////////////////////
3647 // Frames
3648 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3649 {
3650 WaitForSingleObject(frameStartEvent, INFINITE);
3651
3652 // Always make some percent of used bufs unused, to choose different used ones.
3653 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3654 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3655 {
3656 size_t index = threadRand.Generate() % usedItems.size();
3657 unusedItems.push_back(usedItems[index]);
3658 usedItems.erase(usedItems.begin() + index);
3659 }
3660
3661 // Determine which bufs we want to use in this frame.
3662 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3663 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003664 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003665 // Move some used to unused.
3666 while(usedBufCount < usedItems.size())
3667 {
3668 size_t index = threadRand.Generate() % usedItems.size();
3669 unusedItems.push_back(usedItems[index]);
3670 usedItems.erase(usedItems.begin() + index);
3671 }
3672 // Move some unused to used.
3673 while(usedBufCount > usedItems.size())
3674 {
3675 size_t index = threadRand.Generate() % unusedItems.size();
3676 usedItems.push_back(unusedItems[index]);
3677 unusedItems.erase(unusedItems.begin() + index);
3678 }
3679
3680 uint32_t touchExistingCount = 0;
3681 uint32_t touchLostCount = 0;
3682 uint32_t createSucceededCount = 0;
3683 uint32_t createFailedCount = 0;
3684
3685 // Touch all used bufs. If not created or lost, allocate.
3686 for(size_t i = 0; i < usedItems.size(); ++i)
3687 {
3688 Item& item = usedItems[i];
3689 // Not yet created.
3690 if(item.Alloc == VK_NULL_HANDLE)
3691 {
3692 res = Allocate(item);
3693 ++outThreadResult->AllocationCount;
3694 if(res != VK_SUCCESS)
3695 {
3696 item.Alloc = VK_NULL_HANDLE;
3697 item.Buf = VK_NULL_HANDLE;
3698 ++outThreadResult->FailedAllocationCount;
3699 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3700 ++createFailedCount;
3701 }
3702 else
3703 ++createSucceededCount;
3704 }
3705 else
3706 {
3707 // Touch.
3708 VmaAllocationInfo allocInfo;
3709 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3710 // Lost.
3711 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3712 {
3713 ++touchLostCount;
3714
3715 // Destroy.
3716 {
3717 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3718 if(item.Buf)
3719 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3720 else
3721 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3722 ++outThreadResult->DeallocationCount;
3723 }
3724 item.Alloc = VK_NULL_HANDLE;
3725 item.Buf = VK_NULL_HANDLE;
3726
3727 ++outThreadResult->LostAllocationCount;
3728 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3729
3730 // Recreate.
3731 res = Allocate(item);
3732 ++outThreadResult->AllocationCount;
3733 // Creation failed.
3734 if(res != VK_SUCCESS)
3735 {
3736 ++outThreadResult->FailedAllocationCount;
3737 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3738 ++createFailedCount;
3739 }
3740 else
3741 ++createSucceededCount;
3742 }
3743 else
3744 ++touchExistingCount;
3745 }
3746 }
3747
3748 /*
3749 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3750 randSeed, frameIndex,
3751 touchExistingCount, touchLostCount,
3752 createSucceededCount, createFailedCount);
3753 */
3754
3755 SetEvent(frameEndEvent);
3756 }
3757
3758 // Free all remaining items.
3759 for(size_t i = usedItems.size(); i--; )
3760 {
3761 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3762 if(usedItems[i].Buf)
3763 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3764 else
3765 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3766 ++outThreadResult->DeallocationCount;
3767 }
3768 for(size_t i = unusedItems.size(); i--; )
3769 {
3770            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3771 if(unusedItems[i].Buf)
3772 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3773 else
3774 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3775 ++outThreadResult->DeallocationCount;
3776 }
3777 };
3778
3779 // Launch threads.
3780 uint32_t threadRandSeed = mainRand.Generate();
3781 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3782 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3783 std::vector<std::thread> bkgThreads;
3784 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3785 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3786 {
3787 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3788 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3789 bkgThreads.emplace_back(std::bind(
3790 ThreadProc,
3791 &threadResults[threadIndex],
3792 threadRandSeed + threadIndex,
3793 frameStartEvents[threadIndex],
3794 frameEndEvents[threadIndex]));
3795 }
3796
3797 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003798 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003799 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3800 {
3801 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3802 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3803 SetEvent(frameStartEvents[threadIndex]);
3804 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3805 }
3806
3807    // Wait for threads to finish.
3808 for(size_t i = 0; i < bkgThreads.size(); ++i)
3809 {
3810 bkgThreads[i].join();
3811 CloseHandle(frameEndEvents[i]);
3812 CloseHandle(frameStartEvents[i]);
3813 }
3814 bkgThreads.clear();
3815
3816 // Finish time measurement - before destroying pool.
3817 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3818
3819 vmaDestroyPool(g_hAllocator, pool);
3820
3821 outResult.AllocationTimeMin = duration::max();
3822 outResult.AllocationTimeAvg = duration::zero();
3823 outResult.AllocationTimeMax = duration::min();
3824 outResult.DeallocationTimeMin = duration::max();
3825 outResult.DeallocationTimeAvg = duration::zero();
3826 outResult.DeallocationTimeMax = duration::min();
3827 outResult.LostAllocationCount = 0;
3828 outResult.LostAllocationTotalSize = 0;
3829 outResult.FailedAllocationCount = 0;
3830 outResult.FailedAllocationTotalSize = 0;
3831 size_t allocationCount = 0;
3832 size_t deallocationCount = 0;
3833 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3834 {
3835 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3836 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3837 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3838 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3839 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3840 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3841 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3842 allocationCount += threadResult.AllocationCount;
3843 deallocationCount += threadResult.DeallocationCount;
3844 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3845 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3846 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3847 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3848 }
3849 if(allocationCount)
3850 outResult.AllocationTimeAvg /= allocationCount;
3851 if(deallocationCount)
3852 outResult.DeallocationTimeAvg /= deallocationCount;
3853}
3854
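// Returns true if the half-open memory ranges [ptr1, ptr1+size1) and [ptr2, ptr2+size2) overlap.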
3855static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3856{
3857 if(ptr1 < ptr2)
3858 return ptr1 + size1 > ptr2;
3859 else if(ptr2 < ptr1)
3860 return ptr2 + size2 > ptr1;
3861 else
3862 return true;
3863}
3864
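// Tests vmaMapMemory/vmaUnmapMemory reference counting and VMA_ALLOCATION_CREATE_MAPPED_BIT
// for default allocations, allocations from a custom pool, and dedicated allocations.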
3865static void TestMapping()
3866{
3867 wprintf(L"Testing mapping...\n");
3868
3869 VkResult res;
3870 uint32_t memTypeIndex = UINT32_MAX;
3871
3872 enum TEST
3873 {
3874 TEST_NORMAL,
3875 TEST_POOL,
3876 TEST_DEDICATED,
3877 TEST_COUNT
3878 };
3879 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
3880 {
3881 VmaPool pool = nullptr;
3882 if(testIndex == TEST_POOL)
3883 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003884 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003885 VmaPoolCreateInfo poolInfo = {};
3886 poolInfo.memoryTypeIndex = memTypeIndex;
3887 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003888 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003889 }
3890
3891 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3892 bufInfo.size = 0x10000;
3893 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3894
3895 VmaAllocationCreateInfo allocCreateInfo = {};
3896 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3897 allocCreateInfo.pool = pool;
3898 if(testIndex == TEST_DEDICATED)
3899 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3900
3901 VmaAllocationInfo allocInfo;
3902
3903 // Mapped manually
3904
3905        // Create 2 buffers (a 3rd, persistently mapped one, is created later).
3906 BufferInfo bufferInfos[3];
3907 for(size_t i = 0; i < 2; ++i)
3908 {
3909 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3910 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003911 TEST(res == VK_SUCCESS);
3912 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003913 memTypeIndex = allocInfo.memoryType;
3914 }
3915
3916 // Map buffer 0.
3917 char* data00 = nullptr;
3918 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003919 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003920 data00[0xFFFF] = data00[0];
3921
3922 // Map buffer 0 second time.
3923 char* data01 = nullptr;
3924 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003925 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003926
3927 // Map buffer 1.
3928 char* data1 = nullptr;
3929 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003930 TEST(res == VK_SUCCESS && data1 != nullptr);
3931 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01003932 data1[0xFFFF] = data1[0];
3933
3934 // Unmap buffer 0 two times.
3935 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3936 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
3937 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003938 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003939
3940 // Unmap buffer 1.
3941 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
3942 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003943 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003944
3945 // Create 3rd buffer - persistently mapped.
3946 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
3947 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
3948 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003949 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003950
3951 // Map buffer 2.
3952 char* data2 = nullptr;
3953 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003954 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003955 data2[0xFFFF] = data2[0];
3956
3957 // Unmap buffer 2.
3958 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
3959 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003960 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003961
3962 // Destroy all buffers.
3963 for(size_t i = 3; i--; )
3964 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
3965
3966 vmaDestroyPool(g_hAllocator, pool);
3967 }
3968}
3969
Adam Sawickidaa6a552019-06-25 15:26:37 +02003970// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
3971static void TestDeviceLocalMapped()
3972{
3973 VkResult res;
3974
3975 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
3976 {
3977 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3978 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3979 bufCreateInfo.size = 4096;
3980
3981 VmaPool pool = VK_NULL_HANDLE;
3982 VmaAllocationCreateInfo allocCreateInfo = {};
3983 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3984 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3985 if(testIndex == 2)
3986 {
3987 VmaPoolCreateInfo poolCreateInfo = {};
3988 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3989 TEST(res == VK_SUCCESS);
3990 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
3991 TEST(res == VK_SUCCESS);
3992 allocCreateInfo.pool = pool;
3993 }
3994 else if(testIndex == 1)
3995 {
3996 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3997 }
3998
3999 VkBuffer buf = VK_NULL_HANDLE;
4000 VmaAllocation alloc = VK_NULL_HANDLE;
4001 VmaAllocationInfo allocInfo = {};
4002 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
4003 TEST(res == VK_SUCCESS && alloc);
4004
4005 VkMemoryPropertyFlags memTypeFlags = 0;
4006 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
4007 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
4008 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
4009
4010 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4011 vmaDestroyPool(g_hAllocator, pool);
4012 }
4013}
4014
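// Same mapping scenarios exercised concurrently: 16 threads each create buffers with a randomly
// chosen mapping mode (unmapped, temporarily mapped, mapped twice, persistently mapped) and verify
// that pMappedData stays consistent.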
Adam Sawickib8333fb2018-03-13 16:15:53 +01004015static void TestMappingMultithreaded()
4016{
4017 wprintf(L"Testing mapping multithreaded...\n");
4018
4019 static const uint32_t threadCount = 16;
4020 static const uint32_t bufferCount = 1024;
4021 static const uint32_t threadBufferCount = bufferCount / threadCount;
4022
4023 VkResult res;
4024 volatile uint32_t memTypeIndex = UINT32_MAX;
4025
4026 enum TEST
4027 {
4028 TEST_NORMAL,
4029 TEST_POOL,
4030 TEST_DEDICATED,
4031 TEST_COUNT
4032 };
4033 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4034 {
4035 VmaPool pool = nullptr;
4036 if(testIndex == TEST_POOL)
4037 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004038 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004039 VmaPoolCreateInfo poolInfo = {};
4040 poolInfo.memoryTypeIndex = memTypeIndex;
4041 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004042 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004043 }
4044
4045 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4046 bufCreateInfo.size = 0x10000;
4047 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4048
4049 VmaAllocationCreateInfo allocCreateInfo = {};
4050 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4051 allocCreateInfo.pool = pool;
4052 if(testIndex == TEST_DEDICATED)
4053 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4054
4055 std::thread threads[threadCount];
4056 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4057 {
4058 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4059 // ======== THREAD FUNCTION ========
4060
4061 RandomNumberGenerator rand{threadIndex};
4062
4063 enum class MODE
4064 {
4065 // Don't map this buffer at all.
4066 DONT_MAP,
4067 // Map and quickly unmap.
4068 MAP_FOR_MOMENT,
4069 // Map and unmap before destruction.
4070 MAP_FOR_LONGER,
4071 // Map two times. Quickly unmap, second unmap before destruction.
4072 MAP_TWO_TIMES,
4073 // Create this buffer as persistently mapped.
4074 PERSISTENTLY_MAPPED,
4075 COUNT
4076 };
4077 std::vector<BufferInfo> bufInfos{threadBufferCount};
4078 std::vector<MODE> bufModes{threadBufferCount};
4079
4080 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4081 {
4082 BufferInfo& bufInfo = bufInfos[bufferIndex];
4083 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4084 bufModes[bufferIndex] = mode;
4085
4086 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4087 if(mode == MODE::PERSISTENTLY_MAPPED)
4088 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4089
4090 VmaAllocationInfo allocInfo;
4091 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4092 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004093 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004094
4095 if(memTypeIndex == UINT32_MAX)
4096 memTypeIndex = allocInfo.memoryType;
4097
4098 char* data = nullptr;
4099
4100 if(mode == MODE::PERSISTENTLY_MAPPED)
4101 {
4102 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004103 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004104 }
4105 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4106 mode == MODE::MAP_TWO_TIMES)
4107 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004108 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004109 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004110 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004111
4112 if(mode == MODE::MAP_TWO_TIMES)
4113 {
4114 char* data2 = nullptr;
4115 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004116 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004117 }
4118 }
4119 else if(mode == MODE::DONT_MAP)
4120 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004121 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004122 }
4123 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004124 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004125
4126 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4127 if(data)
4128 data[0xFFFF] = data[0];
4129
4130 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4131 {
4132 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4133
4134 VmaAllocationInfo allocInfo;
4135 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4136 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004137 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004138 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004139 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004140 }
4141
4142 switch(rand.Generate() % 3)
4143 {
4144 case 0: Sleep(0); break; // Yield.
4145 case 1: Sleep(10); break; // 10 ms
4146 // default: No sleep.
4147 }
4148
4149 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4150 if(data)
4151 data[0xFFFF] = data[0];
4152 }
4153
4154 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4155 {
4156 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4157 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4158 {
4159 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4160
4161 VmaAllocationInfo allocInfo;
4162 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004163 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004164 }
4165
4166 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4167 }
4168 });
4169 }
4170
4171 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4172 threads[threadIndex].join();
4173
4174 vmaDestroyPool(g_hAllocator, pool);
4175 }
4176}
4177
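// Writes the CSV column header for results of the main allocation tests.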
4178static void WriteMainTestResultHeader(FILE* file)
4179{
4180 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004181 "Code,Time,"
4182 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004183 "Total Time (us),"
4184 "Allocation Time Min (us),"
4185 "Allocation Time Avg (us),"
4186 "Allocation Time Max (us),"
4187 "Deallocation Time Min (us),"
4188 "Deallocation Time Avg (us),"
4189 "Deallocation Time Max (us),"
4190 "Total Memory Allocated (B),"
4191 "Free Range Size Avg (B),"
4192 "Free Range Size Max (B)\n");
4193}
4194
4195static void WriteMainTestResult(
4196 FILE* file,
4197 const char* codeDescription,
4198 const char* testDescription,
4199 const Config& config, const Result& result)
4200{
4201 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4202 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4203 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4204 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4205 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4206 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4207 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4208
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004209 std::string currTime;
4210 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004211
4212 fprintf(file,
4213 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004214 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4215 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004216 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004217 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004218 totalTimeSeconds * 1e6f,
4219 allocationTimeMinSeconds * 1e6f,
4220 allocationTimeAvgSeconds * 1e6f,
4221 allocationTimeMaxSeconds * 1e6f,
4222 deallocationTimeMinSeconds * 1e6f,
4223 deallocationTimeAvgSeconds * 1e6f,
4224 deallocationTimeMaxSeconds * 1e6f,
4225 result.TotalMemoryAllocated,
4226 result.FreeRangeSizeAvg,
4227 result.FreeRangeSizeMax);
4228}
4229
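// Writes the CSV column header for results of the pool benchmark.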
4230static void WritePoolTestResultHeader(FILE* file)
4231{
4232 fprintf(file,
4233 "Code,Test,Time,"
4234 "Config,"
4235 "Total Time (us),"
4236 "Allocation Time Min (us),"
4237 "Allocation Time Avg (us),"
4238 "Allocation Time Max (us),"
4239 "Deallocation Time Min (us),"
4240 "Deallocation Time Avg (us),"
4241 "Deallocation Time Max (us),"
4242 "Lost Allocation Count,"
4243 "Lost Allocation Total Size (B),"
4244 "Failed Allocation Count,"
4245 "Failed Allocation Total Size (B)\n");
4246}
4247
4248static void WritePoolTestResult(
4249 FILE* file,
4250 const char* codeDescription,
4251 const char* testDescription,
4252 const PoolTestConfig& config,
4253 const PoolTestResult& result)
4254{
4255 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4256 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4257 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4258 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4259 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4260 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4261 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4262
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004263 std::string currTime;
4264 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004265
4266 fprintf(file,
4267 "%s,%s,%s,"
4268 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4269 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4270 // General
4271 codeDescription,
4272 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004273 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004274 // Config
4275 config.ThreadCount,
4276 (unsigned long long)config.PoolSize,
4277 config.FrameCount,
4278 config.TotalItemCount,
4279 config.UsedItemCountMin,
4280 config.UsedItemCountMax,
4281 config.ItemsToMakeUnusedPercent,
4282 // Results
4283 totalTimeSeconds * 1e6f,
4284 allocationTimeMinSeconds * 1e6f,
4285 allocationTimeAvgSeconds * 1e6f,
4286 allocationTimeMaxSeconds * 1e6f,
4287 deallocationTimeMinSeconds * 1e6f,
4288 deallocationTimeAvgSeconds * 1e6f,
4289 deallocationTimeMaxSeconds * 1e6f,
4290 result.LostAllocationCount,
4291 result.LostAllocationTotalSize,
4292 result.FailedAllocationCount,
4293 result.FailedAllocationTotalSize);
4294}
4295
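// Scratch test that runs MainTest() once with a single, hand-tuned Config.
// It is not part of the regular suite - enable it by uncommenting the
// PerformCustomMainTest(file) call in Test() below.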
4296static void PerformCustomMainTest(FILE* file)
4297{
4298 Config config{};
4299 config.RandSeed = 65735476;
4300 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4301 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4302 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4303 config.FreeOrder = FREE_ORDER::FORWARD;
4304 config.ThreadCount = 16;
4305 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004306 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004307
4308 // Buffers
4309 //config.AllocationSizes.push_back({4, 16, 1024});
4310 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4311
4312 // Images
4313 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4314 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4315
4316 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4317 config.AdditionalOperationCount = 1024;
4318
4319 Result result{};
4320 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004321 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004322 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4323}
4324
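// Scratch counterpart of PerformCustomMainTest() for the pool benchmark:
// runs TestPool_Benchmark() once with a hand-tuned PoolTestConfig. Enable it
// by uncommenting the PerformCustomPoolTest(file) call in Test() below.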
4325static void PerformCustomPoolTest(FILE* file)
4326{
4327 PoolTestConfig config;
4328 config.PoolSize = 100 * 1024 * 1024;
4329 config.RandSeed = 2345764;
4330 config.ThreadCount = 1;
4331 config.FrameCount = 200;
4332 config.ItemsToMakeUnusedPercent = 2;
4333
4334 AllocationSize allocSize = {};
4335 allocSize.BufferSizeMin = 1024;
4336 allocSize.BufferSizeMax = 1024 * 1024;
4337 allocSize.Probability = 1;
4338 config.AllocationSizes.push_back(allocSize);
4339
4340 allocSize.BufferSizeMin = 0;
4341 allocSize.BufferSizeMax = 0;
4342 allocSize.ImageSizeMin = 128;
4343 allocSize.ImageSizeMax = 1024;
4344 allocSize.Probability = 1;
4345 config.AllocationSizes.push_back(allocSize);
4346
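// Recompute the pool size from the average resource size (overriding the
// rough 100 MB value set above) so the pool holds roughly 200 resources.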
4347 config.PoolSize = config.CalcAvgResourceSize() * 200;
4348 config.UsedItemCountMax = 160;
4349 config.TotalItemCount = config.UsedItemCountMax * 10;
4350 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4351
4352 g_MemoryAliasingWarningEnabled = false;
4353 PoolTestResult result = {};
4354 TestPool_Benchmark(result, config);
4355 g_MemoryAliasingWarningEnabled = true;
4356
4357 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4358}
4359
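// Runs MainTest() over a combinatorial sweep of configurations: thread count,
// buffers vs. images, small vs. large sizes, varying vs. constant sizes,
// fraction of memory allocated up front, and allocation strategy. One CSV row
// is written per combination (times repeatCount). The number of combinations
// grows with ConfigType, so CONFIG_TYPE_LARGE/MAXIMUM runs take considerably longer.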
Adam Sawickib8333fb2018-03-13 16:15:53 +01004360static void PerformMainTests(FILE* file)
4361{
4362 uint32_t repeatCount = 1;
4363 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4364
4365 Config config{};
4366 config.RandSeed = 65735476;
4367 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4368 config.FreeOrder = FREE_ORDER::FORWARD;
4369
4370 size_t threadCountCount = 1;
4371 switch(ConfigType)
4372 {
4373 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4374 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4375 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4376 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4377 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4378 default: assert(0);
4379 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004380
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004381 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004382
Adam Sawickib8333fb2018-03-13 16:15:53 +01004383 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4384 {
4385 std::string desc1;
4386
4387 switch(threadCountIndex)
4388 {
4389 case 0:
4390 desc1 += "1_thread";
4391 config.ThreadCount = 1;
4392 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4393 break;
4394 case 1:
4395 desc1 += "16_threads+0%_common";
4396 config.ThreadCount = 16;
4397 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4398 break;
4399 case 2:
4400 desc1 += "16_threads+50%_common";
4401 config.ThreadCount = 16;
4402 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4403 break;
4404 case 3:
4405 desc1 += "16_threads+100%_common";
4406 config.ThreadCount = 16;
4407 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4408 break;
4409 case 4:
4410 desc1 += "2_threads+0%_common";
4411 config.ThreadCount = 2;
4412 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4413 break;
4414 case 5:
4415 desc1 += "2_threads+50%_common";
4416 config.ThreadCount = 2;
4417 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4418 break;
4419 case 6:
4420 desc1 += "2_threads+100%_common";
4421 config.ThreadCount = 2;
4422 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4423 break;
4424 default:
4425 assert(0);
4426 }
4427
4428 // 0 = buffers, 1 = images, 2 = buffers and images
4429 size_t buffersVsImagesCount = 2;
4430 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4431 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4432 {
4433 std::string desc2 = desc1;
4434 switch(buffersVsImagesIndex)
4435 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004436 case 0: desc2 += ",Buffers"; break;
4437 case 1: desc2 += ",Images"; break;
4438 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004439 default: assert(0);
4440 }
4441
4442 // 0 = small, 1 = large, 2 = small and large
4443 size_t smallVsLargeCount = 2;
4444 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4445 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4446 {
4447 std::string desc3 = desc2;
4448 switch(smallVsLargeIndex)
4449 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004450 case 0: desc3 += ",Small"; break;
4451 case 1: desc3 += ",Large"; break;
4452 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004453 default: assert(0);
4454 }
4455
4456 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4457 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4458 else
4459 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4460
4461 // 0 = varying sizes min...max, 1 = set of constant sizes
4462 size_t constantSizesCount = 1;
4463 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4464 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4465 {
4466 std::string desc4 = desc3;
4467 switch(constantSizesIndex)
4468 {
4469 case 0: desc4 += " Varying_sizes"; break;
4470 case 1: desc4 += " Constant_sizes"; break;
4471 default: assert(0);
4472 }
4473
4474 config.AllocationSizes.clear();
4475 // Buffers present
4476 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4477 {
4478 // Small
4479 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4480 {
4481 // Varying size
4482 if(constantSizesIndex == 0)
4483 config.AllocationSizes.push_back({4, 16, 1024});
4484 // Constant sizes
4485 else
4486 {
4487 config.AllocationSizes.push_back({1, 16, 16});
4488 config.AllocationSizes.push_back({1, 64, 64});
4489 config.AllocationSizes.push_back({1, 256, 256});
4490 config.AllocationSizes.push_back({1, 1024, 1024});
4491 }
4492 }
4493 // Large
4494 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4495 {
4496 // Varying size
4497 if(constantSizesIndex == 0)
4498 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4499 // Constant sizes
4500 else
4501 {
4502 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4503 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4504 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4505 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4506 }
4507 }
4508 }
4509 // Images present
4510 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4511 {
4512 // Small
4513 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4514 {
4515 // Varying size
4516 if(constantSizesIndex == 0)
4517 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4518 // Constant sizes
4519 else
4520 {
4521 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4522 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4523 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4524 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4525 }
4526 }
4527 // Large
4528 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4529 {
4530 // Varying size
4531 if(constantSizesIndex == 0)
4532 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4533 // Constant sizes
4534 else
4535 {
4536 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4537 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4538 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4539 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4540 }
4541 }
4542 }
4543
4544 // Initial allocation fraction: 0 = 100% up front (no additional operations), 1 = 50%, 2 = 5%, 3 = 95% - cases 1-3 then perform many additional operations.
4545 size_t beginBytesToAllocateCount = 1;
4546 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4547 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4548 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4549 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4550 {
4551 std::string desc5 = desc4;
4552
4553 switch(beginBytesToAllocateIndex)
4554 {
4555 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004556 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004557 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4558 config.AdditionalOperationCount = 0;
4559 break;
4560 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004561 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004562 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4563 config.AdditionalOperationCount = 1024;
4564 break;
4565 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004566 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004567 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4568 config.AdditionalOperationCount = 1024;
4569 break;
4570 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004571 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004572 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4573 config.AdditionalOperationCount = 1024;
4574 break;
4575 default:
4576 assert(0);
4577 }
4578
Adam Sawicki0667e332018-08-24 17:26:44 +02004579 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004580 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004581 std::string desc6 = desc5;
4582 switch(strategyIndex)
4583 {
4584 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004585 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004586 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4587 break;
4588 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004589 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004590 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4591 break;
4592 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004593 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004594 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4595 break;
4596 default:
4597 assert(0);
4598 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004599
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004600 desc6 += ',';
4601 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
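// The resulting label is a comma-separated description, e.g.
// "16_threads+50%_common,Buffers,Small Varying_sizes,Allocate_50%+Operations,BestFit,FORWARD".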
Adam Sawicki740b08f2018-08-27 13:42:07 +02004602
4603 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004604
4605 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4606 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004607 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004608
4609 Result result{};
4610 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004611 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004612 if(file)
4613 {
4614 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4615 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004616 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004617 }
4618 }
4619 }
4620 }
4621 }
4622 }
4623}
4624
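// Pool-based counterpart of PerformMainTests(): sweeps thread count, buffers
// vs. images, small vs. large sizes, varying vs. constant sizes, and the pool
// "subscription" level (how many items are kept alive relative to what the
// pool was sized for), writing one CSV row per combination and repeat.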
4625static void PerformPoolTests(FILE* file)
4626{
4627 const size_t AVG_RESOURCES_PER_POOL = 300;
4628
4629 uint32_t repeatCount = 1;
4630 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4631
4632 PoolTestConfig config{};
4633 config.RandSeed = 2346343;
4634 config.FrameCount = 200;
4635 config.ItemsToMakeUnusedPercent = 2;
4636
4637 size_t threadCountCount = 1;
4638 switch(ConfigType)
4639 {
4640 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4641 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4642 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4643 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4644 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4645 default: assert(0);
4646 }
4647 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4648 {
4649 std::string desc1;
4650
4651 switch(threadCountIndex)
4652 {
4653 case 0:
4654 desc1 += "1_thread";
4655 config.ThreadCount = 1;
4656 break;
4657 case 1:
4658 desc1 += "16_threads";
4659 config.ThreadCount = 16;
4660 break;
4661 case 2:
4662 desc1 += "2_threads";
4663 config.ThreadCount = 2;
4664 break;
4665 default:
4666 assert(0);
4667 }
4668
4669 // 0 = buffers, 1 = images, 2 = buffers and images
4670 size_t buffersVsImagesCount = 2;
4671 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4672 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4673 {
4674 std::string desc2 = desc1;
4675 switch(buffersVsImagesIndex)
4676 {
4677 case 0: desc2 += " Buffers"; break;
4678 case 1: desc2 += " Images"; break;
4679 case 2: desc2 += " Buffers+Images"; break;
4680 default: assert(0);
4681 }
4682
4683 // 0 = small, 1 = large, 2 = small and large
4684 size_t smallVsLargeCount = 2;
4685 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4686 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4687 {
4688 std::string desc3 = desc2;
4689 switch(smallVsLargeIndex)
4690 {
4691 case 0: desc3 += " Small"; break;
4692 case 1: desc3 += " Large"; break;
4693 case 2: desc3 += " Small+Large"; break;
4694 default: assert(0);
4695 }
4696
4697 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4698 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4699 else
4700 config.PoolSize = 4ull * 1024 * 1024;
4701
4702 // 0 = varying sizes min...max, 1 = set of constant sizes
4703 size_t constantSizesCount = 1;
4704 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4705 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4706 {
4707 std::string desc4 = desc3;
4708 switch(constantSizesIndex)
4709 {
4710 case 0: desc4 += " Varying_sizes"; break;
4711 case 1: desc4 += " Constant_sizes"; break;
4712 default: assert(0);
4713 }
4714
4715 config.AllocationSizes.clear();
4716 // Buffers present
4717 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4718 {
4719 // Small
4720 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4721 {
4722 // Varying size
4723 if(constantSizesIndex == 0)
4724 config.AllocationSizes.push_back({4, 16, 1024});
4725 // Constant sizes
4726 else
4727 {
4728 config.AllocationSizes.push_back({1, 16, 16});
4729 config.AllocationSizes.push_back({1, 64, 64});
4730 config.AllocationSizes.push_back({1, 256, 256});
4731 config.AllocationSizes.push_back({1, 1024, 1024});
4732 }
4733 }
4734 // Large
4735 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4736 {
4737 // Varying size
4738 if(constantSizesIndex == 0)
4739 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4740 // Constant sizes
4741 else
4742 {
4743 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4744 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4745 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4746 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4747 }
4748 }
4749 }
4750 // Images present
4751 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4752 {
4753 // Small
4754 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4755 {
4756 // Varying size
4757 if(constantSizesIndex == 0)
4758 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4759 // Constant sizes
4760 else
4761 {
4762 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4763 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4764 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4765 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4766 }
4767 }
4768 // Large
4769 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4770 {
4771 // Varying size
4772 if(constantSizesIndex == 0)
4773 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4774 // Constant sizes
4775 else
4776 {
4777 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4778 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4779 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4780 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4781 }
4782 }
4783 }
4784
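// Size the pool to hold roughly AVG_RESOURCES_PER_POOL resources of average
// size; this replaces the rough value chosen above for small vs. large sizes.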
4785 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4786 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4787
4788 // Subscription mode - UsedItemCountMax as a percentage of AVG_RESOURCES_PER_POOL: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4789 size_t subscriptionModeCount;
4790 switch(ConfigType)
4791 {
4792 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4793 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4794 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4795 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4796 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4797 default: assert(0);
4798 }
4799 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4800 {
4801 std::string desc5 = desc4;
4802
4803 switch(subscriptionModeIndex)
4804 {
4805 case 0:
4806 desc5 += " Subscription_66%";
4807 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4808 break;
4809 case 1:
4810 desc5 += " Subscription_133%";
4811 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4812 break;
4813 case 2:
4814 desc5 += " Subscription_100%";
4815 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4816 break;
4817 case 3:
4818 desc5 += " Subscription_33%";
4819 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4820 break;
4821 case 4:
4822 desc5 += " Subscription_166%";
4823 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4824 break;
4825 default:
4826 assert(0);
4827 }
4828
4829 config.TotalItemCount = config.UsedItemCountMax * 5;
4830 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4831
4832 const char* testDescription = desc5.c_str();
4833
4834 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4835 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004836 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004837
4838 PoolTestResult result{};
4839 g_MemoryAliasingWarningEnabled = false;
4840 TestPool_Benchmark(result, config);
4841 g_MemoryAliasingWarningEnabled = true;
4842 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4843 }
4844 }
4845 }
4846 }
4847 }
4848 }
4849}
4850
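// Smoke test for a custom pool created with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT.
// The buddy algorithm rounds each allocation up to a power-of-two node size,
// so some internal fragmentation in the pool stats is expected here.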
Adam Sawickia83793a2018-09-03 13:40:42 +02004851static void BasicTestBuddyAllocator()
4852{
4853 wprintf(L"Basic test buddy allocator\n");
4854
4855 RandomNumberGenerator rand{76543};
4856
4857 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4858 sampleBufCreateInfo.size = 1024; // Size is irrelevant here - only used to find a compatible memory type.
4859 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4860
4861 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4862 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4863
4864 VmaPoolCreateInfo poolCreateInfo = {};
4865 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004866 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004867
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004868 // Deliberately add 1023 so the block size is not a power of 2 - the buddy allocator then works with a usable size (1 MB) smaller than the full block size.
4869 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004870 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004871 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004872
4873 VmaPool pool = nullptr;
4874 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004875 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004876
4877 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4878
4879 VmaAllocationCreateInfo allocCreateInfo = {};
4880 allocCreateInfo.pool = pool;
4881
4882 std::vector<BufferInfo> bufInfo;
4883 BufferInfo newBufInfo;
4884 VmaAllocationInfo allocInfo;
4885
4886 bufCreateInfo.size = 1024 * 256;
4887 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4888 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004889 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004890 bufInfo.push_back(newBufInfo);
4891
4892 bufCreateInfo.size = 1024 * 512;
4893 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4894 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004895 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004896 bufInfo.push_back(newBufInfo);
4897
4898 bufCreateInfo.size = 1024 * 128;
4899 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4900 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004901 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004902 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004903
4904 // Test very small allocation, smaller than minimum node size.
4905 bufCreateInfo.size = 1;
4906 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4907 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004908 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004909 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004910
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004911 // Test some small allocation with alignment requirement.
4912 {
4913 VkMemoryRequirements memReq;
4914 memReq.alignment = 256;
4915 memReq.memoryTypeBits = UINT32_MAX;
4916 memReq.size = 32;
4917
4918 newBufInfo.Buffer = VK_NULL_HANDLE;
4919 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
4920 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004921 TEST(res == VK_SUCCESS);
4922 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004923 bufInfo.push_back(newBufInfo);
4924 }
4925
4926 //SaveAllocatorStatsToFile(L"TEST.json");
4927
Adam Sawicki21017c62018-09-07 15:26:59 +02004928 VmaPoolStats stats = {};
4929 vmaGetPoolStats(g_hAllocator, pool, &stats);
4930 int DBG = 0; // Set breakpoint here to inspect `stats`.
4931
Adam Sawicki80927152018-09-07 17:27:23 +02004932 // Allocate enough new buffers to surely fall into second block.
4933 for(uint32_t i = 0; i < 32; ++i)
4934 {
4935 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
4936 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4937 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004938 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02004939 bufInfo.push_back(newBufInfo);
4940 }
4941
4942 SaveAllocatorStatsToFile(L"BuddyTest01.json");
4943
Adam Sawickia83793a2018-09-03 13:40:42 +02004944 // Destroy the buffers in random order.
4945 while(!bufInfo.empty())
4946 {
4947 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
4948 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
4949 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
4950 bufInfo.erase(bufInfo.begin() + indexToDestroy);
4951 }
4952
4953 vmaDestroyPool(g_hAllocator, pool);
4954}
4955
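// Exercises vmaAllocateMemoryPages()/vmaFreeMemoryPages(), which create and
// destroy many allocations in a single call. The call is all-or-nothing: as
// the checks below verify, a failed request leaves no allocations behind.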
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004956static void BasicTestAllocatePages()
4957{
4958 wprintf(L"Basic test allocate pages\n");
4959
4960 RandomNumberGenerator rand{765461};
4961
4962 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4963 sampleBufCreateInfo.size = 1024; // Size is irrelevant here - only used to find a compatible memory type.
4964 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4965
4966 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4967 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4968
4969 VmaPoolCreateInfo poolCreateInfo = {};
4970 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02004971 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004972
4973 // 1 block of 1 MB.
4974 poolCreateInfo.blockSize = 1024 * 1024;
4975 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
4976
4977 // Create pool.
4978 VmaPool pool = nullptr;
4979 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02004980 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004981
4982 // Make 100 allocations of 4 KB - they should fit into the pool.
4983 VkMemoryRequirements memReq;
4984 memReq.memoryTypeBits = UINT32_MAX;
4985 memReq.alignment = 4 * 1024;
4986 memReq.size = 4 * 1024;
4987
4988 VmaAllocationCreateInfo allocCreateInfo = {};
4989 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4990 allocCreateInfo.pool = pool;
4991
4992 constexpr uint32_t allocCount = 100;
4993
4994 std::vector<VmaAllocation> alloc{allocCount};
4995 std::vector<VmaAllocationInfo> allocInfo{allocCount};
4996 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02004997 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02004998 for(uint32_t i = 0; i < allocCount; ++i)
4999 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005000 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005001 allocInfo[i].pMappedData != nullptr &&
5002 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5003 allocInfo[i].memoryType == allocInfo[0].memoryType);
5004 }
5005
5006 // Free the allocations.
5007 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5008 std::fill(alloc.begin(), alloc.end(), nullptr);
5009 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5010
5011 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5012 // Also test optional allocationInfo = null.
5013 memReq.size = 100 * 1024;
5014 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005015 TEST(res != VK_SUCCESS);
5016 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005017
5018 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail: with 128 KB alignment only 8 such allocations fit in the single 1 MB block.
5019 memReq.size = 4 * 1024;
5020 memReq.alignment = 128 * 1024;
5021 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005022 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005023
5024 // Make 100 dedicated allocations of 4 KB.
5025 memReq.alignment = 4 * 1024;
5026 memReq.size = 4 * 1024;
5027
5028 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5029 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5030 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5031 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005032 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005033 for(uint32_t i = 0; i < allocCount; ++i)
5034 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005035 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005036 allocInfo[i].pMappedData != nullptr &&
5037 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5038 allocInfo[i].offset == 0);
5039 if(i > 0)
5040 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005041 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005042 }
5043 }
5044
5045 // Free the allocations.
5046 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5047 std::fill(alloc.begin(), alloc.end(), nullptr);
5048 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5049
5050 vmaDestroyPool(g_hAllocator, pool);
5051}
5052
Adam Sawickif2975342018-10-16 13:49:02 +02005053// Test the testing environment itself: UploadGpuData() and ValidateGpuData() write random contents to GPU buffers and read them back to verify the helpers work.
5054static void TestGpuData()
5055{
5056 RandomNumberGenerator rand = { 53434 };
5057
5058 std::vector<AllocInfo> allocInfo;
5059
5060 for(size_t i = 0; i < 100; ++i)
5061 {
5062 AllocInfo info = {};
5063
5064 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5065 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5066 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5067 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5068 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5069
5070 VmaAllocationCreateInfo allocCreateInfo = {};
5071 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5072
5073 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5074 TEST(res == VK_SUCCESS);
5075
5076 info.m_StartValue = rand.Generate();
5077
5078 allocInfo.push_back(std::move(info));
5079 }
5080
5081 UploadGpuData(allocInfo.data(), allocInfo.size());
5082
5083 ValidateGpuData(allocInfo.data(), allocInfo.size());
5084
5085 DestroyAllAllocations(allocInfo);
5086}
5087
Adam Sawickib8333fb2018-03-13 16:15:53 +01005088void Test()
5089{
5090 wprintf(L"TESTING:\n");
5091
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005092 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005093 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005094 ////////////////////////////////////////////////////////////////////////////////
5095 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005096 return;
5097 }
5098
Adam Sawickib8333fb2018-03-13 16:15:53 +01005099 // # Simple tests
5100
5101 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005102 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005103#if VMA_DEBUG_MARGIN
5104 TestDebugMargin();
5105#else
5106 TestPool_SameSize();
5107 TestHeapSizeLimit();
5108#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005109#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5110 TestAllocationsInitialization();
5111#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005112 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02005113 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005114 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005115 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005116 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005117 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005118
Adam Sawicki4338f662018-09-07 14:12:37 +02005119 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005120 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005121
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005122 {
5123 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005124 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005125 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005126 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005127 fclose(file);
5128 }
5129
Adam Sawickib8333fb2018-03-13 16:15:53 +01005130 TestDefragmentationSimple();
5131 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005132 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005133 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005134
5135 // # Detailed tests
5136 FILE* file;
5137 fopen_s(&file, "Results.csv", "w");
5138 assert(file != NULL);
5139
5140 WriteMainTestResultHeader(file);
5141 PerformMainTests(file);
5142 //PerformCustomMainTest(file);
5143
5144 WritePoolTestResultHeader(file);
5145 PerformPoolTests(file);
5146 //PerformCustomPoolTest(file);
5147
5148 fclose(file);
5149
5150 wprintf(L"Done.\n");
5151}
5152
Adam Sawickif1a793c2018-03-13 15:42:22 +01005153#endif // #ifdef _WIN32