blob: fbdbcd5f8990e334f85cbe4f45a03ef98c33d977 [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
2// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
3//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +010029
30#ifdef _WIN32
31
Adam Sawicki33d2ce72018-08-27 13:59:13 +020032static const char* CODE_DESCRIPTION = "Foo";
33
Adam Sawickif2975342018-10-16 13:49:02 +020034extern VkCommandBuffer g_hTemporaryCommandBuffer;
35void BeginSingleTimeCommands();
36void EndSingleTimeCommands();
37
Adam Sawickibdb89a92018-12-13 11:56:30 +010038#ifndef VMA_DEBUG_MARGIN
39 #define VMA_DEBUG_MARGIN 0
40#endif
41
// Overall "size" of the test run: controls thread counts, iteration counts,
// and how many allocation strategies are exercised (see GetAllocationStrategyCount).
enum CONFIG_TYPE
{
    CONFIG_TYPE_MINIMUM,  // Fastest possible smoke run.
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,  // Most thorough, slowest run.
    CONFIG_TYPE_COUNT
};

// Compile-time selection of the active configuration.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020053
// Order in which surviving allocations are destroyed at the end of a test.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

// Printable names matching the FREE_ORDER enumerators (COUNT excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
61
Adam Sawicki80927152018-09-07 17:27:23 +020062// Copy of internal VmaAlgorithmToStr.
63static const char* AlgorithmToStr(uint32_t algorithm)
64{
65 switch(algorithm)
66 {
67 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
68 return "Linear";
69 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
70 return "Buddy";
71 case 0:
72 return "Default";
73 default:
74 assert(0);
75 return "";
76 }
77}
78
// One weighted size class for randomly generated test resources.
// Exactly one of the two ranges is meaningful per entry: buffer byte sizes
// (BufferSizeMin/Max) or square-ish 2D image dimensions in pixels
// (ImageSizeMin/Max) — callers check BufferSizeMax > 0 to decide which.
struct AllocationSize
{
    uint32_t Probability; // Relative weight when picking a size class at random.
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};
85
// Parameters driving one MainTest() run.
struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;  // Up-front allocation budget, split evenly among threads.
    uint32_t AdditionalOperationCount;  // Random alloc/free ops after the initial phase, split among threads.
    VkDeviceSize MaxBytesToAllocate;    // Per-run byte cap during the additional-operations phase.
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_* — index i maps to VMA_MEMORY_USAGE_GPU_ONLY + i.
    std::vector<AllocationSize> AllocationSizes; // Weighted size classes to draw from.
    uint32_t ThreadCount;
    // Chance (0..100) that a given allocation goes into the shared,
    // mutex-protected pool instead of the owning thread's private list.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
99
// Aggregated timing and memory statistics produced by MainTest().
// The *Avg fields are accumulated as sums during the run and divided by the
// allocation count at the end.
struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated; // usedBytes + unusedBytes from vmaCalculateStats.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};
108
109void TestDefragmentationSimple();
110void TestDefragmentationFull();
111
112struct PoolTestConfig
113{
114 uint32_t RandSeed;
115 uint32_t ThreadCount;
116 VkDeviceSize PoolSize;
117 uint32_t FrameCount;
118 uint32_t TotalItemCount;
119 // Range for number of items used in each frame.
120 uint32_t UsedItemCountMin, UsedItemCountMax;
121 // Percent of items to make unused, and possibly make some others used in each frame.
122 uint32_t ItemsToMakeUnusedPercent;
123 std::vector<AllocationSize> AllocationSizes;
124
125 VkDeviceSize CalcAvgResourceSize() const
126 {
127 uint32_t probabilitySum = 0;
128 VkDeviceSize sizeSum = 0;
129 for(size_t i = 0; i < AllocationSizes.size(); ++i)
130 {
131 const AllocationSize& allocSize = AllocationSizes[i];
132 if(allocSize.BufferSizeMax > 0)
133 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
134 else
135 {
136 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
137 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
138 }
139 probabilitySum += allocSize.Probability;
140 }
141 return sizeSum / probabilitySum;
142 }
143
144 bool UsesBuffers() const
145 {
146 for(size_t i = 0; i < AllocationSizes.size(); ++i)
147 if(AllocationSizes[i].BufferSizeMax > 0)
148 return true;
149 return false;
150 }
151
152 bool UsesImages() const
153 {
154 for(size_t i = 0; i < AllocationSizes.size(); ++i)
155 if(AllocationSizes[i].ImageSizeMax > 0)
156 return true;
157 return false;
158 }
159};
160
// Aggregated results of a pool test run, merged across all worker threads.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
169
// Weight applied to image width*height when counting images against the
// per-thread byte budgets in MainTest.
// NOTE(review): the images actually created are R8G8B8A8 (4 bytes/pixel), so
// this is only a coarse budgeting factor, not the real memory footprint.
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Current frame index; advanced by test code elsewhere in this file (not
// visible in this chunk).
uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200173
// A Vulkan buffer paired with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
179
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200180static uint32_t GetAllocationStrategyCount()
181{
182 uint32_t strategyCount = 0;
183 switch(ConfigType)
184 {
185 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
186 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
187 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
188 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
189 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
190 default: assert(0);
191 }
192 return strategyCount;
193}
194
195static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
196{
197 switch(allocStrategy)
198 {
199 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
200 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
201 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
202 case 0: return "Default"; break;
203 default: assert(0); return "";
204 }
205}
206
Adam Sawickib8333fb2018-03-13 16:15:53 +0100207static void InitResult(Result& outResult)
208{
209 outResult.TotalTime = duration::zero();
210 outResult.AllocationTimeMin = duration::max();
211 outResult.AllocationTimeAvg = duration::zero();
212 outResult.AllocationTimeMax = duration::min();
213 outResult.DeallocationTimeMin = duration::max();
214 outResult.DeallocationTimeAvg = duration::zero();
215 outResult.DeallocationTimeMax = duration::min();
216 outResult.TotalMemoryAllocated = 0;
217 outResult.FreeRangeSizeAvg = 0;
218 outResult.FreeRangeSizeMax = 0;
219}
220
221class TimeRegisterObj
222{
223public:
224 TimeRegisterObj(duration& min, duration& sum, duration& max) :
225 m_Min(min),
226 m_Sum(sum),
227 m_Max(max),
228 m_TimeBeg(std::chrono::high_resolution_clock::now())
229 {
230 }
231
232 ~TimeRegisterObj()
233 {
234 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
235 m_Sum += d;
236 if(d < m_Min) m_Min = d;
237 if(d > m_Max) m_Max = d;
238 }
239
240private:
241 duration& m_Min;
242 duration& m_Sum;
243 duration& m_Max;
244 time_point m_TimeBeg;
245};
246
// Per-thread raw results of a pool test; merged into PoolTestResult by the
// caller. Time "Sum" fields hold totals, not averages.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
255
// Scoped timer binding TimeRegisterObj to a Result's allocation-time fields.
// Note: AllocationTimeAvg accumulates a SUM here; MainTest divides it by the
// allocation count at the end.
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
264
// Scoped timer binding TimeRegisterObj to a Result's deallocation-time fields.
// DeallocationTimeAvg accumulates a SUM; MainTest divides it by count at the end.
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
273
// Scoped timer binding TimeRegisterObj to a PoolTestThreadResult's
// allocation-time fields (min / running sum / max).
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
282
// Scoped timer binding TimeRegisterObj to a PoolTestThreadResult's
// deallocation-time fields (min / running sum / max).
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
291
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200292static void CurrentTimeToStr(std::string& out)
293{
294 time_t rawTime; time(&rawTime);
295 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
296 char timeStr[128];
297 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
298 out = timeStr;
299}
300
// Multithreaded stress test of the global allocator g_hAllocator.
// Each of config.ThreadCount threads:
//   1. allocates random buffers/images until its share of BeginBytesToAllocate
//      is reached,
//   2. performs its share of AdditionalOperationCount random alloc/free
//      operations (capped by its share of MaxBytesToAllocate),
//   3. waits on a manual-reset event while the main thread snapshots
//      allocator statistics, then
//   4. frees its remaining allocations in config.FreeOrder.
// Allocations may land in a shared, mutex-protected "common" list instead of
// the owning thread's private list, per
// ThreadsUsingCommonAllocationsProbabilityPercent.
// Timing min/avg/max and memory statistics are returned through outResult;
// returns the last VkResult produced by a create call.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    // Total weight used to pick one of the 4 memory-usage classes
    // (VMA_MEMORY_USAGE_GPU_ONLY + index).
    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    // Total weight used to pick one of the configured size classes.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // Either Buffer or Image is set, never both.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared among threads; guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer (bufferSize > 0) or one image (imageExtent non-zero),
    // times the create call, and appends the result to either the shared list
    // or the caller's private list.
    // NOTE(review): AllocationTimeRegisterObj mutates outResult's timing
    // fields and this lambda runs concurrently on all worker threads with no
    // synchronization around outResult — presumably tolerated for a benchmark,
    // but it is a data race; confirm intent.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        // Exactly one of buffer / image must be requested.
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted pick of the memory-usage class.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images get optimal tiling; host-accessible usages get
            // linear tiling.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Pick usage flags plausible for the chosen memory-usage class.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            // Any allocation failure fails the whole test.
            TEST(0);
        }
        return res;
    };

    // Weighted pick of the next resource size: either a buffer byte size
    // (rounded down to a multiple of 16) or a 2D image extent.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16; // Align down to 16 bytes.
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event: threads park on it after their allocation phases and
    // are released all at once by SetEvent below.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation, from either the shared list or
                // this thread's private list.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Only free if it doesn't underflow this thread's byte counter.
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Park until the main thread has captured allocator statistics.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared by all worker threads here
                // with no synchronization — if RandomNumberGenerator is not
                // thread-safe this is a data race; threadRand would be the
                // safe choice. TODO confirm.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    // Launch workers, each with a distinct derived seed.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0); // Busy-yield; acceptable for a test harness.

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // Convert accumulated time sums into averages.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
667
Adam Sawicki51fa9662018-10-03 13:44:29 +0200668void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100669{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100670 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100671 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200672 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100673 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200674 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100675}
676
// One test resource (buffer or image), its VMA allocation, the create-info
// needed to recreate it, and a seed value used to fill and later validate its
// contents (see UploadGpuData / ValidateGpuData).
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0; // First uint32 written to the resource; contents count up from it.
    // Only the member matching the resource type (m_Buffer vs m_Image) is valid.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // Creates the buffer and its allocation; TEST-asserts on failure.
    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    // Destroys whichever Vulkan resource is set, then frees the allocation.
    void Destroy();
};
694
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200695void AllocInfo::CreateBuffer(
696 const VkBufferCreateInfo& bufCreateInfo,
697 const VmaAllocationCreateInfo& allocCreateInfo)
698{
699 m_BufferInfo = bufCreateInfo;
700 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
701 TEST(res == VK_SUCCESS);
702}
703
704void AllocInfo::Destroy()
705{
706 if(m_Image)
707 {
708 vkDestroyImage(g_hDevice, m_Image, nullptr);
709 }
710 if(m_Buffer)
711 {
712 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
713 }
714 if(m_Allocation)
715 {
716 vmaFreeMemory(g_hAllocator, m_Allocation);
717 }
718}
719
// A reusable pool of persistently-mapped host-visible staging buffers,
// capped at MAX_TOTAL_SIZE bytes in total. Buffers are handed out by
// AcquireBuffer, marked reusable by ReleaseAllBuffers, and destroyed in the
// destructor.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks every buffer unused; the buffers themselves stay allocated for reuse.
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024; // 256 MiB budget.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr; // Persistent mapping (VMA_ALLOCATION_CREATE_MAPPED_BIT).
        bool Used = false;         // Currently handed out to a caller.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
743
744StagingBufferCollection::~StagingBufferCollection()
745{
746 for(size_t i = m_Bufs.size(); i--; )
747 {
748 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
749 }
750}
751
// Hands out a staging buffer of at least `size` bytes, preferring the
// smallest unused buffer that fits; otherwise creates a new one within the
// MAX_TOTAL_SIZE budget; otherwise frees ALL unused buffers and retries once
// recursively. Returns false only when `size` cannot fit in the budget even
// after freeing everything unused.
bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        // CPU-only memory, persistently mapped so callers can write/read
        // through MappedPtr directly.
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // Budget exceeded and no single unused buffer fits. If any unused buffers
    // exist (each individually too small), free them all and retry once.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        // Iterate backwards so erase() doesn't shift un-visited elements.
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        // Recursion terminates: either the new buffer now fits the budget, or
        // no unused buffers remain and we fall through to `return false`.
        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}
833
834void StagingBufferCollection::ReleaseAllBuffers()
835{
836 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
837 {
838 m_Bufs[i].Used = false;
839 }
840}
841
// Fills each buffer in allocInfo[0..allocInfoCount) with a counting uint32
// pattern starting at its m_StartValue, by writing staging buffers and
// recording copy commands. When the staging pool runs dry, the pending
// command buffer is submitted and the staging buffers are recycled before
// continuing. Images are not supported and TEST-fail.
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging budget exhausted: flush pending copies so the
                // staging buffers can be reused, then acquire again.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer with the counting pattern.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit any copies still pending after the loop.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}
905
// Copies each buffer in allocInfo[0..allocInfoCount) back into staging
// buffers and TEST-verifies it still contains the counting uint32 pattern
// that UploadGpuData wrote (starting at m_StartValue). Validation is
// deferred until the copy commands have been submitted: once per staging-pool
// flush mid-loop, and once at the end. Images are not supported and TEST-fail.
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    // Index of the first allocation whose staging pointer is held in
    // validateStagingBuffers (pointers are parallel to allocInfo from there).
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                // Staging budget exhausted: submit pending copies, then
                // validate everything staged so far before recycling.
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                // Start a fresh deferred-validation batch at the current allocation.
                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    // Submit and validate the final batch.
    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}
1012
Adam Sawickib8333fb2018-03-13 16:15:53 +01001013static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1014{
1015 outMemReq = {};
1016 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1017 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1018}
1019
1020static void CreateBuffer(
1021 VmaPool pool,
1022 const VkBufferCreateInfo& bufCreateInfo,
1023 bool persistentlyMapped,
1024 AllocInfo& outAllocInfo)
1025{
1026 outAllocInfo = {};
1027 outAllocInfo.m_BufferInfo = bufCreateInfo;
1028
1029 VmaAllocationCreateInfo allocCreateInfo = {};
1030 allocCreateInfo.pool = pool;
1031 if(persistentlyMapped)
1032 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1033
1034 VmaAllocationInfo vmaAllocInfo = {};
1035 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1036
1037 // Setup StartValue and fill.
1038 {
1039 outAllocInfo.m_StartValue = (uint32_t)rand();
1040 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001041 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001042 if(!persistentlyMapped)
1043 {
1044 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1045 }
1046
1047 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001048 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001049 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1050 data[i] = value++;
1051
1052 if(!persistentlyMapped)
1053 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1054 }
1055}
1056
// Creates a single test allocation with default memory requirements
// (CPU_TO_GPU): a buffer of random size, filled with consecutive uint32_t
// values starting at a random m_StartValue so the content can be validated
// later by ValidateAllocationData(). The image path below is currently
// disabled (isBuffer is hard-coded to true).
static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0; // ~1 in 16 allocations is "large".
    if(isBuffer)
    {
        // Sizes are multiples of 1024, which satisfies the "% 4" assertion below.
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        // NOTE(review): dead code while isBuffer is hard-coded to true above.
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    // Write the validation pattern. Map temporarily unless the allocation is
    // already mapped (pMappedData != nullptr).
    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}
1127
1128static void DestroyAllocation(const AllocInfo& allocation)
1129{
1130 if(allocation.m_Buffer)
1131 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1132 else
1133 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1134}
1135
1136static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1137{
1138 for(size_t i = allocations.size(); i--; )
1139 DestroyAllocation(allocations[i]);
1140 allocations.clear();
1141}
1142
1143static void ValidateAllocationData(const AllocInfo& allocation)
1144{
1145 VmaAllocationInfo allocInfo;
1146 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1147
1148 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1149 if(allocInfo.pMappedData == nullptr)
1150 {
1151 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001152 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001153 }
1154
1155 uint32_t value = allocation.m_StartValue;
1156 bool ok = true;
1157 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001158 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001159 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1160 {
1161 if(data[i] != value++)
1162 {
1163 ok = false;
1164 break;
1165 }
1166 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001167 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001168
1169 if(allocInfo.pMappedData == nullptr)
1170 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1171}
1172
// After defragmentation has moved an allocation to a different place in
// memory, the VkBuffer/VkImage bound to the old location is stale. This
// destroys it, creates a fresh resource with the original create-info, and
// binds it at the allocation's current deviceMemory + offset.
static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        // Rebind at the allocation's (possibly new) memory location.
        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}
1208
1209static void Defragment(AllocInfo* allocs, size_t allocCount,
1210 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1211 VmaDefragmentationStats* defragmentationStats = nullptr)
1212{
1213 std::vector<VmaAllocation> vmaAllocs(allocCount);
1214 for(size_t i = 0; i < allocCount; ++i)
1215 vmaAllocs[i] = allocs[i].m_Allocation;
1216
1217 std::vector<VkBool32> allocChanged(allocCount);
1218
1219 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1220 defragmentationInfo, defragmentationStats) );
1221
1222 for(size_t i = 0; i < allocCount; ++i)
1223 {
1224 if(allocChanged[i])
1225 {
1226 RecreateAllocationResource(allocs[i]);
1227 }
1228 }
1229}
1230
1231static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1232{
1233 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1234 ValidateAllocationData(allocInfo);
1235 });
1236}
1237
1238void TestDefragmentationSimple()
1239{
1240 wprintf(L"Test defragmentation simple\n");
1241
1242 RandomNumberGenerator rand(667);
1243
1244 const VkDeviceSize BUF_SIZE = 0x10000;
1245 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1246
1247 const VkDeviceSize MIN_BUF_SIZE = 32;
1248 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1249 auto RandomBufSize = [&]() -> VkDeviceSize {
1250 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1251 };
1252
1253 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1254 bufCreateInfo.size = BUF_SIZE;
1255 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1256
1257 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1258 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1259
1260 uint32_t memTypeIndex = UINT32_MAX;
1261 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1262
1263 VmaPoolCreateInfo poolCreateInfo = {};
1264 poolCreateInfo.blockSize = BLOCK_SIZE;
1265 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1266
1267 VmaPool pool;
1268 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1269
Adam Sawickie1681912018-11-23 17:50:12 +01001270 // Defragmentation of empty pool.
1271 {
1272 VmaDefragmentationInfo2 defragInfo = {};
1273 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1274 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1275 defragInfo.poolCount = 1;
1276 defragInfo.pPools = &pool;
1277
1278 VmaDefragmentationStats defragStats = {};
1279 VmaDefragmentationContext defragCtx = nullptr;
1280 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1281 TEST(res >= VK_SUCCESS);
1282 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1283 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1284 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1285 }
1286
Adam Sawickib8333fb2018-03-13 16:15:53 +01001287 std::vector<AllocInfo> allocations;
1288
1289 // persistentlyMappedOption = 0 - not persistently mapped.
1290 // persistentlyMappedOption = 1 - persistently mapped.
1291 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1292 {
1293 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1294 const bool persistentlyMapped = persistentlyMappedOption != 0;
1295
1296 // # Test 1
1297 // Buffers of fixed size.
1298 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1299 // Expected result: at least 1 block freed.
1300 {
1301 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1302 {
1303 AllocInfo allocInfo;
1304 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1305 allocations.push_back(allocInfo);
1306 }
1307
1308 for(size_t i = 1; i < allocations.size(); ++i)
1309 {
1310 DestroyAllocation(allocations[i]);
1311 allocations.erase(allocations.begin() + i);
1312 }
1313
1314 VmaDefragmentationStats defragStats;
1315 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001316 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1317 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001318
1319 ValidateAllocationsData(allocations.data(), allocations.size());
1320
1321 DestroyAllAllocations(allocations);
1322 }
1323
1324 // # Test 2
1325 // Buffers of fixed size.
1326 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1327 // Expected result: Each of 4 interations makes some progress.
1328 {
1329 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1330 {
1331 AllocInfo allocInfo;
1332 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1333 allocations.push_back(allocInfo);
1334 }
1335
1336 for(size_t i = 1; i < allocations.size(); ++i)
1337 {
1338 DestroyAllocation(allocations[i]);
1339 allocations.erase(allocations.begin() + i);
1340 }
1341
1342 VmaDefragmentationInfo defragInfo = {};
1343 defragInfo.maxAllocationsToMove = 1;
1344 defragInfo.maxBytesToMove = BUF_SIZE;
1345
1346 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1347 {
1348 VmaDefragmentationStats defragStats;
1349 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001350 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001351 }
1352
1353 ValidateAllocationsData(allocations.data(), allocations.size());
1354
1355 DestroyAllAllocations(allocations);
1356 }
1357
1358 // # Test 3
1359 // Buffers of variable size.
1360 // Create a number of buffers. Remove some percent of them.
1361 // Defragment while having some percent of them unmovable.
1362 // Expected result: Just simple validation.
1363 {
1364 for(size_t i = 0; i < 100; ++i)
1365 {
1366 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1367 localBufCreateInfo.size = RandomBufSize();
1368
1369 AllocInfo allocInfo;
1370 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1371 allocations.push_back(allocInfo);
1372 }
1373
1374 const uint32_t percentToDelete = 60;
1375 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1376 for(size_t i = 0; i < numberToDelete; ++i)
1377 {
1378 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1379 DestroyAllocation(allocations[indexToDelete]);
1380 allocations.erase(allocations.begin() + indexToDelete);
1381 }
1382
1383 // Non-movable allocations will be at the beginning of allocations array.
1384 const uint32_t percentNonMovable = 20;
1385 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1386 for(size_t i = 0; i < numberNonMovable; ++i)
1387 {
1388 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1389 if(indexNonMovable != i)
1390 std::swap(allocations[i], allocations[indexNonMovable]);
1391 }
1392
1393 VmaDefragmentationStats defragStats;
1394 Defragment(
1395 allocations.data() + numberNonMovable,
1396 allocations.size() - numberNonMovable,
1397 nullptr, &defragStats);
1398
1399 ValidateAllocationsData(allocations.data(), allocations.size());
1400
1401 DestroyAllAllocations(allocations);
1402 }
1403 }
1404
Adam Sawicki647cf242018-11-23 17:58:00 +01001405 /*
1406 Allocation that must be move to an overlapping place using memmove().
1407 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1408 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001409 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001410 {
1411 AllocInfo allocInfo[2];
1412
1413 bufCreateInfo.size = BUF_SIZE;
1414 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1415 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1416 bufCreateInfo.size = biggerBufSize;
1417 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1418
1419 DestroyAllocation(allocInfo[0]);
1420
1421 VmaDefragmentationStats defragStats;
1422 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1423 // If this fails, it means we couldn't do memmove with overlapping regions.
1424 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1425
1426 ValidateAllocationsData(&allocInfo[1], 1);
1427 DestroyAllocation(allocInfo[1]);
1428 }
1429
Adam Sawickib8333fb2018-03-13 16:15:53 +01001430 vmaDestroyPool(g_hAllocator, pool);
1431}
1432
// Tests that defragmenting a whole pool (VmaDefragmentationInfo2::pPools)
// produces exactly the same statistics as passing the same allocations
// explicitly (pAllocations). Both cases run on identically constructed pools
// with identical allocation patterns, and the resulting stats are compared.
void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    // caseIndex == 0: defragment by pool. caseIndex == 1: defragment by explicit list.
    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        // erase() shifts remaining elements, so this removes every other buffer.
        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            // Case 0: let the library enumerate the pool's allocations itself.
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            // Case 1: hand over the very same allocations explicitly.
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    // Both ways of specifying the same set of allocations must behave identically.
    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}
1519
// Stress test: creates 400 allocations of random sizes, deletes 80% of them at
// random, then runs an unlimited vmaDefragment over the remainder, recreates
// the moved resources, and validates all contents.
void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    // Sanity check before defragmentation.
    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        // Optionally exclude a percentage of allocations from defragmentation
        // (currently 0 - all are movable).
        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            // No limits - move as much as the algorithm wants.
            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    // NOTE(review): indexing allocations[i] with the vmaAllocations
                    // index is only correct because nonMovablePercent == 0 above, so
                    // no elements were erased from vmaAllocations. Revisit if that
                    // percent is ever raised.
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}
1603
// Tests GPU-side defragmentation: fills roughly 3 blocks of 256 MB with
// GPU-only buffers, frees most of them, then defragments the movable subset
// using a command buffer (VmaDefragmentationInfo2::commandBuffer) and
// validates buffer contents afterwards. Movability is tagged via pUserData:
// (uintptr_t)1 = movable, (uintptr_t)2 = non-movable.
static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create that many allocations to surely fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        if(rand.Generate() % 100 < percentNonMovable)
        {
            // Tag as non-movable via pUserData == 2.
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see different color in output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        std::vector<size_t> allocationOriginalIndex;

        // Collect only the movable allocations (tagged pUserData == 1), keeping
        // a mapping back to their index in `allocations`.
        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        // Record the defragmentation's copy commands into the temporary command
        // buffer, submit it, then finish the defragmentation.
        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = 0;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
        defragInfo.pAllocations = allocationPtrs.data();
        defragInfo.pAllocationsChanged = allocationChanged.data();
        defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
        defragInfo.commandBuffer = g_hTemporaryCommandBuffer;

        VmaDefragmentationStats stats = {};
        VmaDefragmentationContext ctx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
        TEST(res >= VK_SUCCESS);

        EndSingleTimeCommands();

        vmaDefragmentationEnd(g_hAllocator, ctx);

        // Recreate and rebind the Vulkan buffers of allocations that were moved.
        for(size_t i = 0; i < movableAllocCount; ++i)
        {
            if(allocationChanged[i])
            {
                const size_t origAllocIndex = allocationOriginalIndex[i];
                RecreateAllocationResource(allocations[origAllocIndex]);
            }
        }

        // If corruption detection is enabled, GPU defragmentation may not work on
        // memory types that have this detection active, e.g. on Intel.
        #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
            TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
            TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
        #endif
    }

    ValidateGpuData(allocations.data(), allocations.size());

    swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
    SaveAllocatorStatsToFile(fileName);

    // Destroy all remaining buffers.
    for(size_t i = allocations.size(); i--; )
    {
        allocations[i].Destroy();
    }

    g_MemoryAliasingWarningEnabled = true;
}
1744
Adam Sawickib8333fb2018-03-13 16:15:53 +01001745static void TestUserData()
1746{
1747 VkResult res;
1748
1749 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1750 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1751 bufCreateInfo.size = 0x10000;
1752
1753 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1754 {
1755 // Opaque pointer
1756 {
1757
1758 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1759 void* pointerToSomething = &res;
1760
1761 VmaAllocationCreateInfo allocCreateInfo = {};
1762 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1763 allocCreateInfo.pUserData = numberAsPointer;
1764 if(testIndex == 1)
1765 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1766
1767 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1768 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001769 TEST(res == VK_SUCCESS);
1770 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001771
1772 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001773 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001774
1775 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1776 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001777 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001778
1779 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1780 }
1781
1782 // String
1783 {
1784 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1785 const char* name2 = "2";
1786 const size_t name1Len = strlen(name1);
1787
1788 char* name1Buf = new char[name1Len + 1];
1789 strcpy_s(name1Buf, name1Len + 1, name1);
1790
1791 VmaAllocationCreateInfo allocCreateInfo = {};
1792 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1793 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1794 allocCreateInfo.pUserData = name1Buf;
1795 if(testIndex == 1)
1796 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1797
1798 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1799 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001800 TEST(res == VK_SUCCESS);
1801 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1802 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001803
1804 delete[] name1Buf;
1805
1806 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001807 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001808
1809 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1810 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001811 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001812
1813 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1814 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001815 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001816
1817 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1818 }
1819 }
1820}
1821
Adam Sawicki370ab182018-11-08 16:31:00 +01001822static void TestInvalidAllocations()
1823{
1824 VkResult res;
1825
1826 VmaAllocationCreateInfo allocCreateInfo = {};
1827 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1828
1829 // Try to allocate 0 bytes.
1830 {
1831 VkMemoryRequirements memReq = {};
1832 memReq.size = 0; // !!!
1833 memReq.alignment = 4;
1834 memReq.memoryTypeBits = UINT32_MAX;
1835 VmaAllocation alloc = VK_NULL_HANDLE;
1836 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1837 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1838 }
1839
1840 // Try to create buffer with size = 0.
1841 {
1842 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1843 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1844 bufCreateInfo.size = 0; // !!!
1845 VkBuffer buf = VK_NULL_HANDLE;
1846 VmaAllocation alloc = VK_NULL_HANDLE;
1847 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1848 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1849 }
1850
1851 // Try to create image with one dimension = 0.
1852 {
1853 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1854 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1855 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1856 imageCreateInfo.extent.width = 128;
1857 imageCreateInfo.extent.height = 0; // !!!
1858 imageCreateInfo.extent.depth = 1;
1859 imageCreateInfo.mipLevels = 1;
1860 imageCreateInfo.arrayLayers = 1;
1861 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1862 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1863 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1864 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1865 VkImage image = VK_NULL_HANDLE;
1866 VmaAllocation alloc = VK_NULL_HANDLE;
1867 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1868 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1869 }
1870}
1871
Adam Sawickib8333fb2018-03-13 16:15:53 +01001872static void TestMemoryRequirements()
1873{
1874 VkResult res;
1875 VkBuffer buf;
1876 VmaAllocation alloc;
1877 VmaAllocationInfo allocInfo;
1878
1879 const VkPhysicalDeviceMemoryProperties* memProps;
1880 vmaGetMemoryProperties(g_hAllocator, &memProps);
1881
1882 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1883 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1884 bufInfo.size = 128;
1885
1886 VmaAllocationCreateInfo allocCreateInfo = {};
1887
1888 // No requirements.
1889 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001890 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001891 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1892
1893 // Usage.
1894 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1895 allocCreateInfo.requiredFlags = 0;
1896 allocCreateInfo.preferredFlags = 0;
1897 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1898
1899 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001900 TEST(res == VK_SUCCESS);
1901 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001902 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1903
1904 // Required flags, preferred flags.
1905 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1906 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1907 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1908 allocCreateInfo.memoryTypeBits = 0;
1909
1910 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001911 TEST(res == VK_SUCCESS);
1912 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1913 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001914 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1915
1916 // memoryTypeBits.
1917 const uint32_t memType = allocInfo.memoryType;
1918 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1919 allocCreateInfo.requiredFlags = 0;
1920 allocCreateInfo.preferredFlags = 0;
1921 allocCreateInfo.memoryTypeBits = 1u << memType;
1922
1923 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001924 TEST(res == VK_SUCCESS);
1925 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001926 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1927
1928}
1929
1930static void TestBasics()
1931{
1932 VkResult res;
1933
1934 TestMemoryRequirements();
1935
1936 // Lost allocation
1937 {
1938 VmaAllocation alloc = VK_NULL_HANDLE;
1939 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001940 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001941
1942 VmaAllocationInfo allocInfo;
1943 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001944 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1945 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001946
1947 vmaFreeMemory(g_hAllocator, alloc);
1948 }
1949
1950 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1951 {
1952 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1953 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1954 bufCreateInfo.size = 128;
1955
1956 VmaAllocationCreateInfo allocCreateInfo = {};
1957 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1958 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1959
1960 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1961 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001962 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001963
1964 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1965
1966 // Same with OWN_MEMORY.
1967 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1968
1969 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001970 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001971
1972 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1973 }
1974
1975 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001976
1977 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001978}
1979
void TestHeapSizeLimit()
{
    // Creates a dedicated VmaAllocator with every memory heap artificially
    // limited via VmaAllocatorCreateInfo::pHeapSizeLimit, fills the whole
    // budget with dedicated and pool allocations, and checks that one more
    // allocation fails with VK_ERROR_OUT_OF_DEVICE_MEMORY.
    const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
    const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB

    // Limit every heap, since we don't know up front which heap the
    // chosen memory type belongs to.
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = HEAP_SIZE_LIMIT;
    }

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    // A buffer together with the allocation backing it, so both can be
    // destroyed at the end.
    struct Item
    {
        VkBuffer hBuf;
        VmaAllocation hAlloc;
    };
    std::vector<Item> items;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    // 1. Allocate two blocks of Own Memory, half the size of BLOCK_SIZE.
    // Together they consume one BLOCK_SIZE worth of the heap budget.
    VmaAllocationInfo ownAllocInfo;
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        for(size_t i = 0; i < 2; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // Create pool to make sure allocations must be out of this memory type.
    // Uses the same memory type that the dedicated allocations above landed in.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
    poolCreateInfo.blockSize = BLOCK_SIZE;

    VmaPool hPool;
    res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
    TEST(res == VK_SUCCESS);

    // 2. Allocate normal buffers from all the remaining memory.
    // Two half-block buffers per remaining block; one block's worth of budget
    // was already consumed by the dedicated allocations in step 1.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = BLOCK_SIZE / 2;

        const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
        for(size_t i = 0; i < bufCount; ++i)
        {
            Item item;
            res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
            TEST(res == VK_SUCCESS);
            items.push_back(item);
        }
    }

    // 3. Allocation of one more (even small) buffer should fail.
    // The heap budget is exhausted, so the allocator must refuse it.
    {
        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.pool = hPool;

        bufCreateInfo.size = 128;

        VkBuffer hBuf;
        VmaAllocation hAlloc;
        res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
        TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
    }

    // Destroy everything.
    for(size_t i = items.size(); i--; )
    {
        vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
    }

    vmaDestroyPool(hAllocator, hPool);

    vmaDestroyAllocator(hAllocator);
}
2077
Adam Sawicki212a4a62018-06-14 15:44:45 +02002078#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002079static void TestDebugMargin()
2080{
2081 if(VMA_DEBUG_MARGIN == 0)
2082 {
2083 return;
2084 }
2085
2086 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002087 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002088
2089 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002090 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002091
2092 // Create few buffers of different size.
2093 const size_t BUF_COUNT = 10;
2094 BufferInfo buffers[BUF_COUNT];
2095 VmaAllocationInfo allocInfo[BUF_COUNT];
2096 for(size_t i = 0; i < 10; ++i)
2097 {
2098 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002099 // Last one will be mapped.
2100 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002101
2102 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002103 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002104 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002106
2107 if(i == BUF_COUNT - 1)
2108 {
2109 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002110 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002111 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2112 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2113 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002114 }
2115
2116 // Check if their offsets preserve margin between them.
2117 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2118 {
2119 if(lhs.deviceMemory != rhs.deviceMemory)
2120 {
2121 return lhs.deviceMemory < rhs.deviceMemory;
2122 }
2123 return lhs.offset < rhs.offset;
2124 });
2125 for(size_t i = 1; i < BUF_COUNT; ++i)
2126 {
2127 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2128 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002129 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002130 }
2131 }
2132
Adam Sawicki212a4a62018-06-14 15:44:45 +02002133 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002134 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002135
Adam Sawicki73b16652018-06-11 16:39:25 +02002136 // Destroy all buffers.
2137 for(size_t i = BUF_COUNT; i--; )
2138 {
2139 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2140 }
2141}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002142#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002143
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002144static void TestLinearAllocator()
2145{
2146 wprintf(L"Test linear allocator\n");
2147
2148 RandomNumberGenerator rand{645332};
2149
2150 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2151 sampleBufCreateInfo.size = 1024; // Whatever.
2152 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2153
2154 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2155 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2156
2157 VmaPoolCreateInfo poolCreateInfo = {};
2158 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002159 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002160
Adam Sawickiee082772018-06-20 17:45:49 +02002161 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002162 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2163 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2164
2165 VmaPool pool = nullptr;
2166 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002167 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002168
2169 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2170
2171 VmaAllocationCreateInfo allocCreateInfo = {};
2172 allocCreateInfo.pool = pool;
2173
2174 constexpr size_t maxBufCount = 100;
2175 std::vector<BufferInfo> bufInfo;
2176
2177 constexpr VkDeviceSize bufSizeMin = 16;
2178 constexpr VkDeviceSize bufSizeMax = 1024;
2179 VmaAllocationInfo allocInfo;
2180 VkDeviceSize prevOffset = 0;
2181
2182 // Test one-time free.
2183 for(size_t i = 0; i < 2; ++i)
2184 {
2185 // Allocate number of buffers of varying size that surely fit into this block.
2186 VkDeviceSize bufSumSize = 0;
2187 for(size_t i = 0; i < maxBufCount; ++i)
2188 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002189 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002190 BufferInfo newBufInfo;
2191 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2192 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002193 TEST(res == VK_SUCCESS);
2194 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002195 bufInfo.push_back(newBufInfo);
2196 prevOffset = allocInfo.offset;
2197 bufSumSize += bufCreateInfo.size;
2198 }
2199
2200 // Validate pool stats.
2201 VmaPoolStats stats;
2202 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002203 TEST(stats.size == poolCreateInfo.blockSize);
2204 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
2205 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002206
2207 // Destroy the buffers in random order.
2208 while(!bufInfo.empty())
2209 {
2210 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2211 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2212 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2213 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2214 }
2215 }
2216
2217 // Test stack.
2218 {
2219 // Allocate number of buffers of varying size that surely fit into this block.
2220 for(size_t i = 0; i < maxBufCount; ++i)
2221 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002222 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002223 BufferInfo newBufInfo;
2224 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2225 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002226 TEST(res == VK_SUCCESS);
2227 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002228 bufInfo.push_back(newBufInfo);
2229 prevOffset = allocInfo.offset;
2230 }
2231
2232 // Destroy few buffers from top of the stack.
2233 for(size_t i = 0; i < maxBufCount / 5; ++i)
2234 {
2235 const BufferInfo& currBufInfo = bufInfo.back();
2236 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2237 bufInfo.pop_back();
2238 }
2239
2240 // Create some more
2241 for(size_t i = 0; i < maxBufCount / 5; ++i)
2242 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002243 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002244 BufferInfo newBufInfo;
2245 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2246 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002247 TEST(res == VK_SUCCESS);
2248 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002249 bufInfo.push_back(newBufInfo);
2250 prevOffset = allocInfo.offset;
2251 }
2252
2253 // Destroy the buffers in reverse order.
2254 while(!bufInfo.empty())
2255 {
2256 const BufferInfo& currBufInfo = bufInfo.back();
2257 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2258 bufInfo.pop_back();
2259 }
2260 }
2261
Adam Sawickiee082772018-06-20 17:45:49 +02002262 // Test ring buffer.
2263 {
2264 // Allocate number of buffers that surely fit into this block.
2265 bufCreateInfo.size = bufSizeMax;
2266 for(size_t i = 0; i < maxBufCount; ++i)
2267 {
2268 BufferInfo newBufInfo;
2269 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2270 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002271 TEST(res == VK_SUCCESS);
2272 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002273 bufInfo.push_back(newBufInfo);
2274 prevOffset = allocInfo.offset;
2275 }
2276
2277 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2278 const size_t buffersPerIter = maxBufCount / 10 - 1;
2279 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2280 for(size_t iter = 0; iter < iterCount; ++iter)
2281 {
2282 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2283 {
2284 const BufferInfo& currBufInfo = bufInfo.front();
2285 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2286 bufInfo.erase(bufInfo.begin());
2287 }
2288 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2289 {
2290 BufferInfo newBufInfo;
2291 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2292 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002293 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002294 bufInfo.push_back(newBufInfo);
2295 }
2296 }
2297
2298 // Allocate buffers until we reach out-of-memory.
2299 uint32_t debugIndex = 0;
2300 while(res == VK_SUCCESS)
2301 {
2302 BufferInfo newBufInfo;
2303 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2304 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2305 if(res == VK_SUCCESS)
2306 {
2307 bufInfo.push_back(newBufInfo);
2308 }
2309 else
2310 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002311 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002312 }
2313 ++debugIndex;
2314 }
2315
2316 // Destroy the buffers in random order.
2317 while(!bufInfo.empty())
2318 {
2319 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2320 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2321 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2322 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2323 }
2324 }
2325
Adam Sawicki680b2252018-08-22 14:47:32 +02002326 // Test double stack.
2327 {
2328 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
2329 VkDeviceSize prevOffsetLower = 0;
2330 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2331 for(size_t i = 0; i < maxBufCount; ++i)
2332 {
2333 const bool upperAddress = (i % 2) != 0;
2334 if(upperAddress)
2335 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2336 else
2337 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002338 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002339 BufferInfo newBufInfo;
2340 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2341 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002342 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002343 if(upperAddress)
2344 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002345 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002346 prevOffsetUpper = allocInfo.offset;
2347 }
2348 else
2349 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002350 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002351 prevOffsetLower = allocInfo.offset;
2352 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002353 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002354 bufInfo.push_back(newBufInfo);
2355 }
2356
2357 // Destroy few buffers from top of the stack.
2358 for(size_t i = 0; i < maxBufCount / 5; ++i)
2359 {
2360 const BufferInfo& currBufInfo = bufInfo.back();
2361 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2362 bufInfo.pop_back();
2363 }
2364
2365 // Create some more
2366 for(size_t i = 0; i < maxBufCount / 5; ++i)
2367 {
2368 const bool upperAddress = (i % 2) != 0;
2369 if(upperAddress)
2370 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2371 else
2372 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002373 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002374 BufferInfo newBufInfo;
2375 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2376 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002377 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002378 bufInfo.push_back(newBufInfo);
2379 }
2380
2381 // Destroy the buffers in reverse order.
2382 while(!bufInfo.empty())
2383 {
2384 const BufferInfo& currBufInfo = bufInfo.back();
2385 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2386 bufInfo.pop_back();
2387 }
2388
2389 // Create buffers on both sides until we reach out of memory.
2390 prevOffsetLower = 0;
2391 prevOffsetUpper = poolCreateInfo.blockSize;
2392 res = VK_SUCCESS;
2393 for(size_t i = 0; res == VK_SUCCESS; ++i)
2394 {
2395 const bool upperAddress = (i % 2) != 0;
2396 if(upperAddress)
2397 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2398 else
2399 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002400 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002401 BufferInfo newBufInfo;
2402 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2403 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2404 if(res == VK_SUCCESS)
2405 {
2406 if(upperAddress)
2407 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002408 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002409 prevOffsetUpper = allocInfo.offset;
2410 }
2411 else
2412 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002413 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002414 prevOffsetLower = allocInfo.offset;
2415 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002416 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002417 bufInfo.push_back(newBufInfo);
2418 }
2419 }
2420
2421 // Destroy the buffers in random order.
2422 while(!bufInfo.empty())
2423 {
2424 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2425 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2426 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2427 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2428 }
2429
2430 // Create buffers on upper side only, constant size, until we reach out of memory.
2431 prevOffsetUpper = poolCreateInfo.blockSize;
2432 res = VK_SUCCESS;
2433 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2434 bufCreateInfo.size = bufSizeMax;
2435 for(size_t i = 0; res == VK_SUCCESS; ++i)
2436 {
2437 BufferInfo newBufInfo;
2438 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2439 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2440 if(res == VK_SUCCESS)
2441 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002442 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002443 prevOffsetUpper = allocInfo.offset;
2444 bufInfo.push_back(newBufInfo);
2445 }
2446 }
2447
2448 // Destroy the buffers in reverse order.
2449 while(!bufInfo.empty())
2450 {
2451 const BufferInfo& currBufInfo = bufInfo.back();
2452 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2453 bufInfo.pop_back();
2454 }
2455 }
2456
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002457 // Test ring buffer with lost allocations.
2458 {
2459 // Allocate number of buffers until pool is full.
2460 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2461 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2462 res = VK_SUCCESS;
2463 for(size_t i = 0; res == VK_SUCCESS; ++i)
2464 {
2465 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2466
Adam Sawickifd366b62019-01-24 15:26:43 +01002467 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002468
2469 BufferInfo newBufInfo;
2470 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2471 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2472 if(res == VK_SUCCESS)
2473 bufInfo.push_back(newBufInfo);
2474 }
2475
2476 // Free first half of it.
2477 {
2478 const size_t buffersToDelete = bufInfo.size() / 2;
2479 for(size_t i = 0; i < buffersToDelete; ++i)
2480 {
2481 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2482 }
2483 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2484 }
2485
2486 // Allocate number of buffers until pool is full again.
        // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002488 res = VK_SUCCESS;
2489 for(size_t i = 0; res == VK_SUCCESS; ++i)
2490 {
2491 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2492
Adam Sawickifd366b62019-01-24 15:26:43 +01002493 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002494
2495 BufferInfo newBufInfo;
2496 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2497 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2498 if(res == VK_SUCCESS)
2499 bufInfo.push_back(newBufInfo);
2500 }
2501
2502 VkDeviceSize firstNewOffset;
2503 {
2504 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2505
2506 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2507 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2508 bufCreateInfo.size = bufSizeMax;
2509
2510 BufferInfo newBufInfo;
2511 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2512 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002513 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002514 bufInfo.push_back(newBufInfo);
2515 firstNewOffset = allocInfo.offset;
2516
2517 // Make sure at least one buffer from the beginning became lost.
2518 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002519 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002520 }
2521
Adam Sawickifd366b62019-01-24 15:26:43 +01002522#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002523 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2524 size_t newCount = 1;
2525 for(;;)
2526 {
2527 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2528
Adam Sawickifd366b62019-01-24 15:26:43 +01002529 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002530
2531 BufferInfo newBufInfo;
2532 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2533 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002534
Adam Sawickib8d34d52018-10-03 17:41:20 +02002535 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002536 bufInfo.push_back(newBufInfo);
2537 ++newCount;
2538 if(allocInfo.offset < firstNewOffset)
2539 break;
2540 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002541#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002542
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002543 // Delete buffers that are lost.
2544 for(size_t i = bufInfo.size(); i--; )
2545 {
2546 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2547 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2548 {
2549 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2550 bufInfo.erase(bufInfo.begin() + i);
2551 }
2552 }
2553
2554 // Test vmaMakePoolAllocationsLost
2555 {
2556 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2557
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002558 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002559 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002560 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002561
2562 size_t realLostAllocCount = 0;
2563 for(size_t i = 0; i < bufInfo.size(); ++i)
2564 {
2565 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2566 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2567 ++realLostAllocCount;
2568 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002569 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002570 }
2571
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002572 // Destroy all the buffers in forward order.
2573 for(size_t i = 0; i < bufInfo.size(); ++i)
2574 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2575 bufInfo.clear();
2576 }
2577
Adam Sawicki70a683e2018-08-24 15:36:32 +02002578 vmaDestroyPool(g_hAllocator, pool);
2579}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002580
// Tests a linear-algorithm pool that is allowed to grow to multiple VkDeviceMemory blocks.
// Verifies that: allocations spill over to a second block when the first fills up,
// freeing everything (in random order) lets the pool shrink back to at most one block,
// and stack-like (LIFO) usage likewise collapses back to a single block.
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer: used to pick the memory type and as the template for all real buffers.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // No explicit blockSize / maxBlockCount, so the pool is free to create additional blocks.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            // A different deviceMemory handle means this allocation landed in a new block.
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block (empty blocks may be released).
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer - it should fit in the first block again.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
2715
Adam Sawickifd11d752018-08-22 15:02:10 +02002716static void ManuallyTestLinearAllocator()
2717{
2718 VmaStats origStats;
2719 vmaCalculateStats(g_hAllocator, &origStats);
2720
2721 wprintf(L"Manually test linear allocator\n");
2722
2723 RandomNumberGenerator rand{645332};
2724
2725 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2726 sampleBufCreateInfo.size = 1024; // Whatever.
2727 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2728
2729 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2730 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2731
2732 VmaPoolCreateInfo poolCreateInfo = {};
2733 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002734 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002735
2736 poolCreateInfo.blockSize = 10 * 1024;
2737 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2738 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2739
2740 VmaPool pool = nullptr;
2741 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002742 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002743
2744 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2745
2746 VmaAllocationCreateInfo allocCreateInfo = {};
2747 allocCreateInfo.pool = pool;
2748
2749 std::vector<BufferInfo> bufInfo;
2750 VmaAllocationInfo allocInfo;
2751 BufferInfo newBufInfo;
2752
2753 // Test double stack.
2754 {
2755 /*
2756 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2757 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2758
2759 Totally:
2760 1 block allocated
2761 10240 Vulkan bytes
2762 6 new allocations
2763 2256 bytes in allocations
2764 */
2765
2766 bufCreateInfo.size = 32;
2767 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2768 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002769 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002770 bufInfo.push_back(newBufInfo);
2771
2772 bufCreateInfo.size = 1024;
2773 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2774 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002775 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002776 bufInfo.push_back(newBufInfo);
2777
2778 bufCreateInfo.size = 32;
2779 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2780 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002781 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002782 bufInfo.push_back(newBufInfo);
2783
2784 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2785
2786 bufCreateInfo.size = 128;
2787 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2788 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002789 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002790 bufInfo.push_back(newBufInfo);
2791
2792 bufCreateInfo.size = 1024;
2793 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2794 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002795 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002796 bufInfo.push_back(newBufInfo);
2797
2798 bufCreateInfo.size = 16;
2799 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2800 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002801 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002802 bufInfo.push_back(newBufInfo);
2803
2804 VmaStats currStats;
2805 vmaCalculateStats(g_hAllocator, &currStats);
2806 VmaPoolStats poolStats;
2807 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2808
2809 char* statsStr = nullptr;
2810 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2811
2812 // PUT BREAKPOINT HERE TO CHECK.
2813 // Inspect: currStats versus origStats, poolStats, statsStr.
2814 int I = 0;
2815
2816 vmaFreeStatsString(g_hAllocator, statsStr);
2817
2818 // Destroy the buffers in reverse order.
2819 while(!bufInfo.empty())
2820 {
2821 const BufferInfo& currBufInfo = bufInfo.back();
2822 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2823 bufInfo.pop_back();
2824 }
2825 }
2826
2827 vmaDestroyPool(g_hAllocator, pool);
2828}
2829
// Benchmarks one (algorithm, empty/fragmented, allocation strategy, free order)
// combination: repeatedly allocates and frees raw memory from a single-block pool,
// timing the allocation and deallocation phases separately, then prints the result
// and optionally appends a CSV row to `file`.
//   algorithm     - 0 (default) or one of VMA_POOL_CREATE_*_ALGORITHM_BIT.
//   empty         - if false, the pool is pre-fragmented before measuring.
//   allocStrategy - VMA_ALLOCATION_CREATE_STRATEGY_* bits (0 = default).
//   freeOrder     - order in which the timed allocations are freed.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    // Sample buffer exists only to select a memory type and query requirements.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single fixed-size block so all combinations compete on the same arena.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    // Pre-fragment the pool so the benchmark runs against a realistic layout.
    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reorder first (outside the timed region), then free.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    // Append one CSV row per case when a report file was provided.
    if(file)
    {
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
2981
Adam Sawicki80927152018-09-07 17:27:23 +02002982static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002983{
Adam Sawicki80927152018-09-07 17:27:23 +02002984 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002985
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002986 if(file)
2987 {
2988 fprintf(file,
2989 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002990 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002991 "Allocation time (s),Deallocation time (s)\n");
2992 }
2993
Adam Sawicki0a607132018-08-24 11:18:41 +02002994 uint32_t freeOrderCount = 1;
2995 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2996 freeOrderCount = 3;
2997 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2998 freeOrderCount = 2;
2999
3000 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003001 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003002
3003 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3004 {
3005 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3006 switch(freeOrderIndex)
3007 {
3008 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3009 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3010 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3011 default: assert(0);
3012 }
3013
3014 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3015 {
Adam Sawicki80927152018-09-07 17:27:23 +02003016 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003017 {
Adam Sawicki80927152018-09-07 17:27:23 +02003018 uint32_t algorithm = 0;
3019 switch(algorithmIndex)
3020 {
3021 case 0:
3022 break;
3023 case 1:
3024 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3025 break;
3026 case 2:
3027 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3028 break;
3029 default:
3030 assert(0);
3031 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003032
Adam Sawicki80927152018-09-07 17:27:23 +02003033 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003034 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3035 {
3036 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003037 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003038 {
3039 switch(allocStrategyIndex)
3040 {
3041 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3042 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3043 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3044 default: assert(0);
3045 }
3046 }
3047
Adam Sawicki80927152018-09-07 17:27:23 +02003048 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003049 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003050 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003051 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003052 strategy,
3053 freeOrder); // freeOrder
3054 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003055 }
3056 }
3057 }
3058}
3059
Adam Sawickib8333fb2018-03-13 16:15:53 +01003060static void TestPool_SameSize()
3061{
3062 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3063 const size_t BUF_COUNT = 100;
3064 VkResult res;
3065
3066 RandomNumberGenerator rand{123};
3067
3068 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3069 bufferInfo.size = BUF_SIZE;
3070 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3071
3072 uint32_t memoryTypeBits = UINT32_MAX;
3073 {
3074 VkBuffer dummyBuffer;
3075 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003076 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003077
3078 VkMemoryRequirements memReq;
3079 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3080 memoryTypeBits = memReq.memoryTypeBits;
3081
3082 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3083 }
3084
3085 VmaAllocationCreateInfo poolAllocInfo = {};
3086 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3087 uint32_t memTypeIndex;
3088 res = vmaFindMemoryTypeIndex(
3089 g_hAllocator,
3090 memoryTypeBits,
3091 &poolAllocInfo,
3092 &memTypeIndex);
3093
3094 VmaPoolCreateInfo poolCreateInfo = {};
3095 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3096 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3097 poolCreateInfo.minBlockCount = 1;
3098 poolCreateInfo.maxBlockCount = 4;
3099 poolCreateInfo.frameInUseCount = 0;
3100
3101 VmaPool pool;
3102 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003103 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003104
3105 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3106
3107 VmaAllocationCreateInfo allocInfo = {};
3108 allocInfo.pool = pool;
3109 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3110 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3111
3112 struct BufItem
3113 {
3114 VkBuffer Buf;
3115 VmaAllocation Alloc;
3116 };
3117 std::vector<BufItem> items;
3118
3119 // Fill entire pool.
3120 for(size_t i = 0; i < BUF_COUNT; ++i)
3121 {
3122 BufItem item;
3123 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003124 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003125 items.push_back(item);
3126 }
3127
3128 // Make sure that another allocation would fail.
3129 {
3130 BufItem item;
3131 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003132 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003133 }
3134
3135 // Validate that no buffer is lost. Also check that they are not mapped.
3136 for(size_t i = 0; i < items.size(); ++i)
3137 {
3138 VmaAllocationInfo allocInfo;
3139 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003140 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3141 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003142 }
3143
3144 // Free some percent of random items.
3145 {
3146 const size_t PERCENT_TO_FREE = 10;
3147 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3148 for(size_t i = 0; i < itemsToFree; ++i)
3149 {
3150 size_t index = (size_t)rand.Generate() % items.size();
3151 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3152 items.erase(items.begin() + index);
3153 }
3154 }
3155
3156 // Randomly allocate and free items.
3157 {
3158 const size_t OPERATION_COUNT = BUF_COUNT;
3159 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3160 {
3161 bool allocate = rand.Generate() % 2 != 0;
3162 if(allocate)
3163 {
3164 if(items.size() < BUF_COUNT)
3165 {
3166 BufItem item;
3167 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003168 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003169 items.push_back(item);
3170 }
3171 }
3172 else // Free
3173 {
3174 if(!items.empty())
3175 {
3176 size_t index = (size_t)rand.Generate() % items.size();
3177 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3178 items.erase(items.begin() + index);
3179 }
3180 }
3181 }
3182 }
3183
3184 // Allocate up to maximum.
3185 while(items.size() < BUF_COUNT)
3186 {
3187 BufItem item;
3188 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003189 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003190 items.push_back(item);
3191 }
3192
3193 // Validate that no buffer is lost.
3194 for(size_t i = 0; i < items.size(); ++i)
3195 {
3196 VmaAllocationInfo allocInfo;
3197 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003198 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003199 }
3200
3201 // Next frame.
3202 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3203
3204 // Allocate another BUF_COUNT buffers.
3205 for(size_t i = 0; i < BUF_COUNT; ++i)
3206 {
3207 BufItem item;
3208 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003209 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003210 items.push_back(item);
3211 }
3212
3213 // Make sure the first BUF_COUNT is lost. Delete them.
3214 for(size_t i = 0; i < BUF_COUNT; ++i)
3215 {
3216 VmaAllocationInfo allocInfo;
3217 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003218 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003219 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3220 }
3221 items.erase(items.begin(), items.begin() + BUF_COUNT);
3222
3223 // Validate that no buffer is lost.
3224 for(size_t i = 0; i < items.size(); ++i)
3225 {
3226 VmaAllocationInfo allocInfo;
3227 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003228 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003229 }
3230
3231 // Free one item.
3232 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3233 items.pop_back();
3234
3235 // Validate statistics.
3236 {
3237 VmaPoolStats poolStats = {};
3238 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003239 TEST(poolStats.allocationCount == items.size());
3240 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
3241 TEST(poolStats.unusedRangeCount == 1);
3242 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3243 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003244 }
3245
3246 // Free all remaining items.
3247 for(size_t i = items.size(); i--; )
3248 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3249 items.clear();
3250
3251 // Allocate maximum items again.
3252 for(size_t i = 0; i < BUF_COUNT; ++i)
3253 {
3254 BufItem item;
3255 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003256 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003257 items.push_back(item);
3258 }
3259
3260 // Delete every other item.
3261 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3262 {
3263 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3264 items.erase(items.begin() + i);
3265 }
3266
3267 // Defragment!
3268 {
3269 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3270 for(size_t i = 0; i < items.size(); ++i)
3271 allocationsToDefragment[i] = items[i].Alloc;
3272
3273 VmaDefragmentationStats defragmentationStats;
3274 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003275 TEST(res == VK_SUCCESS);
3276 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003277 }
3278
3279 // Free all remaining items.
3280 for(size_t i = items.size(); i--; )
3281 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3282 items.clear();
3283
3284 ////////////////////////////////////////////////////////////////////////////////
3285 // Test for vmaMakePoolAllocationsLost
3286
3287 // Allocate 4 buffers on frame 10.
3288 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3289 for(size_t i = 0; i < 4; ++i)
3290 {
3291 BufItem item;
3292 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003293 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003294 items.push_back(item);
3295 }
3296
3297 // Touch first 2 of them on frame 11.
3298 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3299 for(size_t i = 0; i < 2; ++i)
3300 {
3301 VmaAllocationInfo allocInfo;
3302 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3303 }
3304
3305 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3306 size_t lostCount = 0xDEADC0DE;
3307 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003308 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003309
3310 // Make another call. Now 0 should be lost.
3311 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003312 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003313
3314 // Make another call, with null count. Should not crash.
3315 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3316
3317 // END: Free all remaining items.
3318 for(size_t i = items.size(); i--; )
3319 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3320
3321 items.clear();
3322
Adam Sawickid2924172018-06-11 12:48:46 +02003323 ////////////////////////////////////////////////////////////////////////////////
3324 // Test for allocation too large for pool
3325
3326 {
3327 VmaAllocationCreateInfo allocCreateInfo = {};
3328 allocCreateInfo.pool = pool;
3329
3330 VkMemoryRequirements memReq;
3331 memReq.memoryTypeBits = UINT32_MAX;
3332 memReq.alignment = 1;
3333 memReq.size = poolCreateInfo.blockSize + 4;
3334
3335 VmaAllocation alloc = nullptr;
3336 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003337 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003338 }
3339
Adam Sawickib8333fb2018-03-13 16:15:53 +01003340 vmaDestroyPool(g_hAllocator, pool);
3341}
3342
Adam Sawickib0c36362018-11-13 16:17:38 +01003343static void TestResize()
3344{
3345 wprintf(L"Testing vmaResizeAllocation...\n");
3346
3347 const VkDeviceSize KILOBYTE = 1024ull;
3348 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3349
3350 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3351 bufCreateInfo.size = 2 * MEGABYTE;
3352 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3353
3354 VmaAllocationCreateInfo allocCreateInfo = {};
3355 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3356
3357 uint32_t memTypeIndex = UINT32_MAX;
3358 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3359
3360 VmaPoolCreateInfo poolCreateInfo = {};
3361 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3362 poolCreateInfo.blockSize = 8 * MEGABYTE;
3363 poolCreateInfo.minBlockCount = 1;
3364 poolCreateInfo.maxBlockCount = 1;
3365 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3366
3367 VmaPool pool;
3368 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3369
3370 allocCreateInfo.pool = pool;
3371
3372 // Fill 8 MB pool with 4 * 2 MB allocations.
3373 VmaAllocation allocs[4] = {};
3374
3375 VkMemoryRequirements memReq = {};
3376 memReq.memoryTypeBits = UINT32_MAX;
3377 memReq.alignment = 4;
3378 memReq.size = bufCreateInfo.size;
3379
3380 VmaAllocationInfo allocInfo = {};
3381
3382 for(uint32_t i = 0; i < 4; ++i)
3383 {
3384 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3385 }
3386
3387 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3388
3389 // Case: Resize to the same size always succeeds.
3390 {
3391 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3392 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3393 TEST(allocInfo.size == 2ull * 1024 * 1024);
3394 }
3395
3396 // Case: Shrink allocation at the end.
3397 {
3398 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3399 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3400 TEST(allocInfo.size == 1ull * 1024 * 1024);
3401 }
3402
3403 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3404
3405 // Case: Shrink allocation before free space.
3406 {
3407 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3408 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3409 TEST(allocInfo.size == 512 * KILOBYTE);
3410 }
3411
3412 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3413
3414 // Case: Shrink allocation before next allocation.
3415 {
3416 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3417 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3418 TEST(allocInfo.size == 1 * MEGABYTE);
3419 }
3420
3421 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3422
3423 // Case: Grow allocation while there is even more space available.
3424 {
3425 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3426 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3427 TEST(allocInfo.size == 1 * MEGABYTE);
3428 }
3429
3430 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3431
3432 // Case: Grow allocation while there is exact amount of free space available.
3433 {
3434 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3435 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3436 TEST(allocInfo.size == 2 * MEGABYTE);
3437 }
3438
3439 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3440
3441 // Case: Fail to grow when there is not enough free space due to next allocation.
3442 {
3443 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3444 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3445 TEST(allocInfo.size == 2 * MEGABYTE);
3446 }
3447
3448 // Case: Fail to grow when there is not enough free space due to end of memory block.
3449 {
3450 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3451 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3452 TEST(allocInfo.size == 1 * MEGABYTE);
3453 }
3454
3455 for(uint32_t i = 4; i--; )
3456 {
3457 vmaFreeMemory(g_hAllocator, allocs[i]);
3458 }
3459
3460 vmaDestroyPool(g_hAllocator, pool);
3461
3462 // Test dedicated allocation
3463 {
3464 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3465 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3466 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3467
3468 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3469 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3470
3471 // Case: Resize to the same size always succeeds.
3472 {
3473 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3474 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3475 TEST(allocInfo.size == 2ull * 1024 * 1024);
3476 }
3477
3478 // Case: Shrinking fails.
3479 {
3480 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3481 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3482 TEST(allocInfo.size == 2ull * 1024 * 1024);
3483 }
3484
3485 // Case: Growing fails.
3486 {
3487 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3488 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3489 TEST(allocInfo.size == 2ull * 1024 * 1024);
3490 }
3491
3492 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3493 }
3494}
3495
Adam Sawickie44c6262018-06-15 14:30:39 +02003496static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3497{
3498 const uint8_t* pBytes = (const uint8_t*)pMemory;
3499 for(size_t i = 0; i < size; ++i)
3500 {
3501 if(pBytes[i] != pattern)
3502 {
3503 return false;
3504 }
3505 }
3506 return true;
3507}
3508
// Verifies that allocation memory is filled with a known pattern on creation
// (0xDC) and another on destruction (0xEF).
// NOTE(review): presumably requires the allocator to be built with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled — confirm against build config.
static void TestAllocationsInitialization()
{
    VkResult res;

    const size_t BUF_SIZE = 1024;

    // Create pool.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = BUF_SIZE;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    // Dummy create-info used only to find a host-visible memory type for the pool.
    VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
    dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BUF_SIZE * 10;
    poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
    poolCreateInfo.maxBlockCount = 1;
    res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // bufAllocCreateInfo.pool doubles as the handle of the created pool below.
    VmaAllocationCreateInfo bufAllocCreateInfo = {};
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
    TEST(res == VK_SUCCESS);

    // Create one persistently mapped buffer to keep memory of this block mapped,
    // so that pointer to mapped data will remain (more or less...) valid even
    // after destruction of other allocations.

    bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    VkBuffer firstBuf;
    VmaAllocation firstAlloc;
    res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
    TEST(res == VK_SUCCESS);

    // Test buffers.

    // Two passes: i == 0 persistently mapped, i == 1 manually mapped.
    for(uint32_t i = 0; i < 2; ++i)
    {
        const bool persistentlyMapped = i == 0;
        bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
        VkBuffer buf;
        VmaAllocation alloc;
        VmaAllocationInfo allocInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
        TEST(res == VK_SUCCESS);

        void* pMappedData;
        if(!persistentlyMapped)
        {
            res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
            TEST(res == VK_SUCCESS);
        }
        else
        {
            // Persistently mapped: pointer is already available in allocation info.
            pMappedData = allocInfo.pMappedData;
        }

        // Validate initialized content
        bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
        TEST(valid);

        if(!persistentlyMapped)
        {
            vmaUnmapMemory(g_hAllocator, alloc);
        }

        vmaDestroyBuffer(g_hAllocator, buf, alloc);

        // Validate freed content
        // pMappedData stays usable because firstBuf keeps the block mapped.
        valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
        TEST(valid);
    }

    vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
    vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
}
3587
Adam Sawickib8333fb2018-03-13 16:15:53 +01003588static void TestPool_Benchmark(
3589 PoolTestResult& outResult,
3590 const PoolTestConfig& config)
3591{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003592 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003593
3594 RandomNumberGenerator mainRand{config.RandSeed};
3595
3596 uint32_t allocationSizeProbabilitySum = std::accumulate(
3597 config.AllocationSizes.begin(),
3598 config.AllocationSizes.end(),
3599 0u,
3600 [](uint32_t sum, const AllocationSize& allocSize) {
3601 return sum + allocSize.Probability;
3602 });
3603
3604 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3605 bufferInfo.size = 256; // Whatever.
3606 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3607
3608 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3609 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3610 imageInfo.extent.width = 256; // Whatever.
3611 imageInfo.extent.height = 256; // Whatever.
3612 imageInfo.extent.depth = 1;
3613 imageInfo.mipLevels = 1;
3614 imageInfo.arrayLayers = 1;
3615 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3616 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3617 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3618 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3619 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3620
3621 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3622 {
3623 VkBuffer dummyBuffer;
3624 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003625 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003626
3627 VkMemoryRequirements memReq;
3628 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3629 bufferMemoryTypeBits = memReq.memoryTypeBits;
3630
3631 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3632 }
3633
3634 uint32_t imageMemoryTypeBits = UINT32_MAX;
3635 {
3636 VkImage dummyImage;
3637 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003638 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003639
3640 VkMemoryRequirements memReq;
3641 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3642 imageMemoryTypeBits = memReq.memoryTypeBits;
3643
3644 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3645 }
3646
3647 uint32_t memoryTypeBits = 0;
3648 if(config.UsesBuffers() && config.UsesImages())
3649 {
3650 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3651 if(memoryTypeBits == 0)
3652 {
3653 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3654 return;
3655 }
3656 }
3657 else if(config.UsesBuffers())
3658 memoryTypeBits = bufferMemoryTypeBits;
3659 else if(config.UsesImages())
3660 memoryTypeBits = imageMemoryTypeBits;
3661 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003662 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003663
3664 VmaPoolCreateInfo poolCreateInfo = {};
3665 poolCreateInfo.memoryTypeIndex = 0;
3666 poolCreateInfo.minBlockCount = 1;
3667 poolCreateInfo.maxBlockCount = 1;
3668 poolCreateInfo.blockSize = config.PoolSize;
3669 poolCreateInfo.frameInUseCount = 1;
3670
3671 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3672 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3673 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3674
3675 VmaPool pool;
3676 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003677 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003678
3679 // Start time measurement - after creating pool and initializing data structures.
3680 time_point timeBeg = std::chrono::high_resolution_clock::now();
3681
3682 ////////////////////////////////////////////////////////////////////////////////
3683 // ThreadProc
3684 auto ThreadProc = [&](
3685 PoolTestThreadResult* outThreadResult,
3686 uint32_t randSeed,
3687 HANDLE frameStartEvent,
3688 HANDLE frameEndEvent) -> void
3689 {
3690 RandomNumberGenerator threadRand{randSeed};
3691
3692 outThreadResult->AllocationTimeMin = duration::max();
3693 outThreadResult->AllocationTimeSum = duration::zero();
3694 outThreadResult->AllocationTimeMax = duration::min();
3695 outThreadResult->DeallocationTimeMin = duration::max();
3696 outThreadResult->DeallocationTimeSum = duration::zero();
3697 outThreadResult->DeallocationTimeMax = duration::min();
3698 outThreadResult->AllocationCount = 0;
3699 outThreadResult->DeallocationCount = 0;
3700 outThreadResult->LostAllocationCount = 0;
3701 outThreadResult->LostAllocationTotalSize = 0;
3702 outThreadResult->FailedAllocationCount = 0;
3703 outThreadResult->FailedAllocationTotalSize = 0;
3704
3705 struct Item
3706 {
3707 VkDeviceSize BufferSize;
3708 VkExtent2D ImageSize;
3709 VkBuffer Buf;
3710 VkImage Image;
3711 VmaAllocation Alloc;
3712
3713 VkDeviceSize CalcSizeBytes() const
3714 {
3715 return BufferSize +
3716 ImageSize.width * ImageSize.height * 4;
3717 }
3718 };
3719 std::vector<Item> unusedItems, usedItems;
3720
3721 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3722
3723 // Create all items - all unused, not yet allocated.
3724 for(size_t i = 0; i < threadTotalItemCount; ++i)
3725 {
3726 Item item = {};
3727
3728 uint32_t allocSizeIndex = 0;
3729 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3730 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3731 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3732
3733 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3734 if(allocSize.BufferSizeMax > 0)
3735 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003736 TEST(allocSize.BufferSizeMin > 0);
3737 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003738 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3739 item.BufferSize = allocSize.BufferSizeMin;
3740 else
3741 {
3742 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3743 item.BufferSize = item.BufferSize / 16 * 16;
3744 }
3745 }
3746 else
3747 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003748 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003749 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3750 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3751 else
3752 {
3753 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3754 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3755 }
3756 }
3757
3758 unusedItems.push_back(item);
3759 }
3760
3761 auto Allocate = [&](Item& item) -> VkResult
3762 {
3763 VmaAllocationCreateInfo allocCreateInfo = {};
3764 allocCreateInfo.pool = pool;
3765 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3766 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3767
3768 if(item.BufferSize)
3769 {
3770 bufferInfo.size = item.BufferSize;
3771 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3772 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3773 }
3774 else
3775 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003776 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003777
3778 imageInfo.extent.width = item.ImageSize.width;
3779 imageInfo.extent.height = item.ImageSize.height;
3780 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3781 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3782 }
3783 };
3784
3785 ////////////////////////////////////////////////////////////////////////////////
3786 // Frames
3787 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3788 {
3789 WaitForSingleObject(frameStartEvent, INFINITE);
3790
3791 // Always make some percent of used bufs unused, to choose different used ones.
3792 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3793 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3794 {
3795 size_t index = threadRand.Generate() % usedItems.size();
3796 unusedItems.push_back(usedItems[index]);
3797 usedItems.erase(usedItems.begin() + index);
3798 }
3799
3800 // Determine which bufs we want to use in this frame.
3801 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3802 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003803 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003804 // Move some used to unused.
3805 while(usedBufCount < usedItems.size())
3806 {
3807 size_t index = threadRand.Generate() % usedItems.size();
3808 unusedItems.push_back(usedItems[index]);
3809 usedItems.erase(usedItems.begin() + index);
3810 }
3811 // Move some unused to used.
3812 while(usedBufCount > usedItems.size())
3813 {
3814 size_t index = threadRand.Generate() % unusedItems.size();
3815 usedItems.push_back(unusedItems[index]);
3816 unusedItems.erase(unusedItems.begin() + index);
3817 }
3818
3819 uint32_t touchExistingCount = 0;
3820 uint32_t touchLostCount = 0;
3821 uint32_t createSucceededCount = 0;
3822 uint32_t createFailedCount = 0;
3823
3824 // Touch all used bufs. If not created or lost, allocate.
3825 for(size_t i = 0; i < usedItems.size(); ++i)
3826 {
3827 Item& item = usedItems[i];
3828 // Not yet created.
3829 if(item.Alloc == VK_NULL_HANDLE)
3830 {
3831 res = Allocate(item);
3832 ++outThreadResult->AllocationCount;
3833 if(res != VK_SUCCESS)
3834 {
3835 item.Alloc = VK_NULL_HANDLE;
3836 item.Buf = VK_NULL_HANDLE;
3837 ++outThreadResult->FailedAllocationCount;
3838 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3839 ++createFailedCount;
3840 }
3841 else
3842 ++createSucceededCount;
3843 }
3844 else
3845 {
3846 // Touch.
3847 VmaAllocationInfo allocInfo;
3848 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3849 // Lost.
3850 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3851 {
3852 ++touchLostCount;
3853
3854 // Destroy.
3855 {
3856 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3857 if(item.Buf)
3858 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3859 else
3860 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3861 ++outThreadResult->DeallocationCount;
3862 }
3863 item.Alloc = VK_NULL_HANDLE;
3864 item.Buf = VK_NULL_HANDLE;
3865
3866 ++outThreadResult->LostAllocationCount;
3867 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3868
3869 // Recreate.
3870 res = Allocate(item);
3871 ++outThreadResult->AllocationCount;
3872 // Creation failed.
3873 if(res != VK_SUCCESS)
3874 {
3875 ++outThreadResult->FailedAllocationCount;
3876 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3877 ++createFailedCount;
3878 }
3879 else
3880 ++createSucceededCount;
3881 }
3882 else
3883 ++touchExistingCount;
3884 }
3885 }
3886
3887 /*
3888 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3889 randSeed, frameIndex,
3890 touchExistingCount, touchLostCount,
3891 createSucceededCount, createFailedCount);
3892 */
3893
3894 SetEvent(frameEndEvent);
3895 }
3896
3897 // Free all remaining items.
3898 for(size_t i = usedItems.size(); i--; )
3899 {
3900 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3901 if(usedItems[i].Buf)
3902 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3903 else
3904 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3905 ++outThreadResult->DeallocationCount;
3906 }
3907 for(size_t i = unusedItems.size(); i--; )
3908 {
3909 PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
3910 if(unusedItems[i].Buf)
3911 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3912 else
3913 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3914 ++outThreadResult->DeallocationCount;
3915 }
3916 };
3917
3918 // Launch threads.
3919 uint32_t threadRandSeed = mainRand.Generate();
3920 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3921 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3922 std::vector<std::thread> bkgThreads;
3923 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3924 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3925 {
3926 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3927 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3928 bkgThreads.emplace_back(std::bind(
3929 ThreadProc,
3930 &threadResults[threadIndex],
3931 threadRandSeed + threadIndex,
3932 frameStartEvents[threadIndex],
3933 frameEndEvents[threadIndex]));
3934 }
3935
3936 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003937 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003938 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3939 {
3940 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3941 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3942 SetEvent(frameStartEvents[threadIndex]);
3943 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3944 }
3945
3946 // Wait for threads finished
3947 for(size_t i = 0; i < bkgThreads.size(); ++i)
3948 {
3949 bkgThreads[i].join();
3950 CloseHandle(frameEndEvents[i]);
3951 CloseHandle(frameStartEvents[i]);
3952 }
3953 bkgThreads.clear();
3954
3955 // Finish time measurement - before destroying pool.
3956 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3957
3958 vmaDestroyPool(g_hAllocator, pool);
3959
3960 outResult.AllocationTimeMin = duration::max();
3961 outResult.AllocationTimeAvg = duration::zero();
3962 outResult.AllocationTimeMax = duration::min();
3963 outResult.DeallocationTimeMin = duration::max();
3964 outResult.DeallocationTimeAvg = duration::zero();
3965 outResult.DeallocationTimeMax = duration::min();
3966 outResult.LostAllocationCount = 0;
3967 outResult.LostAllocationTotalSize = 0;
3968 outResult.FailedAllocationCount = 0;
3969 outResult.FailedAllocationTotalSize = 0;
3970 size_t allocationCount = 0;
3971 size_t deallocationCount = 0;
3972 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3973 {
3974 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3975 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3976 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3977 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3978 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3979 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3980 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3981 allocationCount += threadResult.AllocationCount;
3982 deallocationCount += threadResult.DeallocationCount;
3983 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3984 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3985 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3986 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3987 }
3988 if(allocationCount)
3989 outResult.AllocationTimeAvg /= allocationCount;
3990 if(deallocationCount)
3991 outResult.DeallocationTimeAvg /= deallocationCount;
3992}
3993
// Returns true if the byte ranges [ptr1, ptr1 + size1) and [ptr2, ptr2 + size2)
// overlap. Identical start pointers are always treated as overlapping,
// even for zero-sized regions.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Otherwise they overlap iff the lower region extends past the start of the higher one.
    return (ptr1 < ptr2) ? (ptr1 + size1 > ptr2) : (ptr2 + size2 > ptr1);
}
4003
// Exercises the vmaMapMemory/vmaUnmapMemory reference-counting contract in
// three configurations: default allocation, custom pool, and dedicated memory.
// Checks that nested maps return the same pointer, that unmapping drops
// pMappedData to null, and that persistent mapping keeps pMappedData set.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Captured from the first pass's allocations so the TEST_POOL pass can
    // create its pool on the same memory type.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // memTypeIndex was set during the TEST_NORMAL pass.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool; // Null outside TEST_POOL.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers. (Element [2] is filled later as the persistently mapped one.)
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not persistently mapped, so no mapped pointer yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Write through the mapping to verify the pointer is usable.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time.
        // Nested map must return the same pointer.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        // Distinct allocations must map to non-overlapping memory.
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times.
        // Mapping is ref-counted: two maps require two unmaps.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2.
        // Manual map of a persistently mapped allocation returns the same pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2.
        // The persistent mapping must survive the manual unmap.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
4108
4109static void TestMappingMultithreaded()
4110{
4111 wprintf(L"Testing mapping multithreaded...\n");
4112
4113 static const uint32_t threadCount = 16;
4114 static const uint32_t bufferCount = 1024;
4115 static const uint32_t threadBufferCount = bufferCount / threadCount;
4116
4117 VkResult res;
4118 volatile uint32_t memTypeIndex = UINT32_MAX;
4119
4120 enum TEST
4121 {
4122 TEST_NORMAL,
4123 TEST_POOL,
4124 TEST_DEDICATED,
4125 TEST_COUNT
4126 };
4127 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4128 {
4129 VmaPool pool = nullptr;
4130 if(testIndex == TEST_POOL)
4131 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004132 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004133 VmaPoolCreateInfo poolInfo = {};
4134 poolInfo.memoryTypeIndex = memTypeIndex;
4135 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004136 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004137 }
4138
4139 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4140 bufCreateInfo.size = 0x10000;
4141 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4142
4143 VmaAllocationCreateInfo allocCreateInfo = {};
4144 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4145 allocCreateInfo.pool = pool;
4146 if(testIndex == TEST_DEDICATED)
4147 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4148
4149 std::thread threads[threadCount];
4150 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4151 {
4152 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4153 // ======== THREAD FUNCTION ========
4154
4155 RandomNumberGenerator rand{threadIndex};
4156
4157 enum class MODE
4158 {
4159 // Don't map this buffer at all.
4160 DONT_MAP,
4161 // Map and quickly unmap.
4162 MAP_FOR_MOMENT,
4163 // Map and unmap before destruction.
4164 MAP_FOR_LONGER,
4165 // Map two times. Quickly unmap, second unmap before destruction.
4166 MAP_TWO_TIMES,
4167 // Create this buffer as persistently mapped.
4168 PERSISTENTLY_MAPPED,
4169 COUNT
4170 };
4171 std::vector<BufferInfo> bufInfos{threadBufferCount};
4172 std::vector<MODE> bufModes{threadBufferCount};
4173
4174 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4175 {
4176 BufferInfo& bufInfo = bufInfos[bufferIndex];
4177 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4178 bufModes[bufferIndex] = mode;
4179
4180 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4181 if(mode == MODE::PERSISTENTLY_MAPPED)
4182 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4183
4184 VmaAllocationInfo allocInfo;
4185 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4186 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004187 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004188
4189 if(memTypeIndex == UINT32_MAX)
4190 memTypeIndex = allocInfo.memoryType;
4191
4192 char* data = nullptr;
4193
4194 if(mode == MODE::PERSISTENTLY_MAPPED)
4195 {
4196 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004197 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004198 }
4199 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4200 mode == MODE::MAP_TWO_TIMES)
4201 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004202 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004203 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004204 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004205
4206 if(mode == MODE::MAP_TWO_TIMES)
4207 {
4208 char* data2 = nullptr;
4209 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004210 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004211 }
4212 }
4213 else if(mode == MODE::DONT_MAP)
4214 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004215 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004216 }
4217 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004218 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004219
4220 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4221 if(data)
4222 data[0xFFFF] = data[0];
4223
4224 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4225 {
4226 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4227
4228 VmaAllocationInfo allocInfo;
4229 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4230 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004231 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004232 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004233 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004234 }
4235
4236 switch(rand.Generate() % 3)
4237 {
4238 case 0: Sleep(0); break; // Yield.
4239 case 1: Sleep(10); break; // 10 ms
4240 // default: No sleep.
4241 }
4242
4243 // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
4244 if(data)
4245 data[0xFFFF] = data[0];
4246 }
4247
4248 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4249 {
4250 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4251 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4252 {
4253 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4254
4255 VmaAllocationInfo allocInfo;
4256 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004257 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004258 }
4259
4260 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4261 }
4262 });
4263 }
4264
4265 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4266 threads[threadIndex].join();
4267
4268 vmaDestroyPool(g_hAllocator, pool);
4269 }
4270}
4271
// Writes the CSV column-header row for main benchmark results.
// Column order must stay in sync with WriteMainTestResult.
static void WriteMainTestResultHeader(FILE* file)
{
    static const char* const HEADER_ROW =
        "Code,Time,"
        "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Total Memory Allocated (B),"
        "Free Range Size Avg (B),"
        "Free Range Size Max (B)\n";
    fputs(HEADER_ROW, file);
}
4288
4289static void WriteMainTestResult(
4290 FILE* file,
4291 const char* codeDescription,
4292 const char* testDescription,
4293 const Config& config, const Result& result)
4294{
4295 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4296 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4297 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4298 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4299 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4300 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4301 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4302
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004303 std::string currTime;
4304 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004305
4306 fprintf(file,
4307 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004308 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4309 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004310 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004311 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004312 totalTimeSeconds * 1e6f,
4313 allocationTimeMinSeconds * 1e6f,
4314 allocationTimeAvgSeconds * 1e6f,
4315 allocationTimeMaxSeconds * 1e6f,
4316 deallocationTimeMinSeconds * 1e6f,
4317 deallocationTimeAvgSeconds * 1e6f,
4318 deallocationTimeMaxSeconds * 1e6f,
4319 result.TotalMemoryAllocated,
4320 result.FreeRangeSizeAvg,
4321 result.FreeRangeSizeMax);
4322}
4323
// Writes the CSV column-header row for custom-pool benchmark results.
// Column order must stay in sync with WritePoolTestResult.
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER_ROW =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fputs(HEADER_ROW, file);
}
4341
4342static void WritePoolTestResult(
4343 FILE* file,
4344 const char* codeDescription,
4345 const char* testDescription,
4346 const PoolTestConfig& config,
4347 const PoolTestResult& result)
4348{
4349 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4350 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4351 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4352 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4353 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4354 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4355 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4356
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004357 std::string currTime;
4358 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004359
4360 fprintf(file,
4361 "%s,%s,%s,"
4362 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4363 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4364 // General
4365 codeDescription,
4366 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004367 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004368 // Config
4369 config.ThreadCount,
4370 (unsigned long long)config.PoolSize,
4371 config.FrameCount,
4372 config.TotalItemCount,
4373 config.UsedItemCountMin,
4374 config.UsedItemCountMax,
4375 config.ItemsToMakeUnusedPercent,
4376 // Results
4377 totalTimeSeconds * 1e6f,
4378 allocationTimeMinSeconds * 1e6f,
4379 allocationTimeAvgSeconds * 1e6f,
4380 allocationTimeMaxSeconds * 1e6f,
4381 deallocationTimeMinSeconds * 1e6f,
4382 deallocationTimeAvgSeconds * 1e6f,
4383 deallocationTimeMaxSeconds * 1e6f,
4384 result.LostAllocationCount,
4385 result.LostAllocationTotalSize,
4386 result.FailedAllocationCount,
4387 result.FailedAllocationTotalSize);
4388}
4389
4390static void PerformCustomMainTest(FILE* file)
4391{
4392 Config config{};
4393 config.RandSeed = 65735476;
4394 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4395 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4396 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4397 config.FreeOrder = FREE_ORDER::FORWARD;
4398 config.ThreadCount = 16;
4399 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004400 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004401
4402 // Buffers
4403 //config.AllocationSizes.push_back({4, 16, 1024});
4404 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4405
4406 // Images
4407 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4408 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4409
4410 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4411 config.AdditionalOperationCount = 1024;
4412
4413 Result result{};
4414 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004415 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004416 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4417}
4418
4419static void PerformCustomPoolTest(FILE* file)
4420{
4421 PoolTestConfig config;
4422 config.PoolSize = 100 * 1024 * 1024;
4423 config.RandSeed = 2345764;
4424 config.ThreadCount = 1;
4425 config.FrameCount = 200;
4426 config.ItemsToMakeUnusedPercent = 2;
4427
4428 AllocationSize allocSize = {};
4429 allocSize.BufferSizeMin = 1024;
4430 allocSize.BufferSizeMax = 1024 * 1024;
4431 allocSize.Probability = 1;
4432 config.AllocationSizes.push_back(allocSize);
4433
4434 allocSize.BufferSizeMin = 0;
4435 allocSize.BufferSizeMax = 0;
4436 allocSize.ImageSizeMin = 128;
4437 allocSize.ImageSizeMax = 1024;
4438 allocSize.Probability = 1;
4439 config.AllocationSizes.push_back(allocSize);
4440
4441 config.PoolSize = config.CalcAvgResourceSize() * 200;
4442 config.UsedItemCountMax = 160;
4443 config.TotalItemCount = config.UsedItemCountMax * 10;
4444 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4445
4446 g_MemoryAliasingWarningEnabled = false;
4447 PoolTestResult result = {};
4448 TestPool_Benchmark(result, config);
4449 g_MemoryAliasingWarningEnabled = true;
4450
4451 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4452}
4453
// Runs the full main benchmark matrix and appends one CSV row per run to
// `file` (may be null to skip writing). The matrix is the cross product of:
// thread count x buffers/images x small/large sizes x varying/constant sizes
// x initial-allocation percentage x allocation strategy. The number of
// variants per axis grows with the global ConfigType. Each variant's CSV
// description string (desc1..desc6) is built up incrementally along the way.
static void PerformMainTests(FILE* file)
{
    // Repeat each variant several times only at the most thorough config level.
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    // Axis 1: number of threads (and how much they share allocations).
    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // Axis 2: 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // Axis 3: 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // Axis 4: 0 = varying sizes min...max, 1 = set of constant sizes
                // NOTE(review): this axis uses a leading space in the description
                // instead of a comma like the other axes - presumably historical;
                // changing it would alter existing CSV output.
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Build the size distribution for this variant from scratch.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Axis 5: 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        // Axis 6: allocation strategy (count comes from the library).
                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            // Run (and optionally record) this fully-specified variant.
                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
4718
4719static void PerformPoolTests(FILE* file)
4720{
4721 const size_t AVG_RESOURCES_PER_POOL = 300;
4722
4723 uint32_t repeatCount = 1;
4724 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4725
4726 PoolTestConfig config{};
4727 config.RandSeed = 2346343;
4728 config.FrameCount = 200;
4729 config.ItemsToMakeUnusedPercent = 2;
4730
4731 size_t threadCountCount = 1;
4732 switch(ConfigType)
4733 {
4734 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4735 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4736 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4737 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4738 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4739 default: assert(0);
4740 }
4741 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4742 {
4743 std::string desc1;
4744
4745 switch(threadCountIndex)
4746 {
4747 case 0:
4748 desc1 += "1_thread";
4749 config.ThreadCount = 1;
4750 break;
4751 case 1:
4752 desc1 += "16_threads";
4753 config.ThreadCount = 16;
4754 break;
4755 case 2:
4756 desc1 += "2_threads";
4757 config.ThreadCount = 2;
4758 break;
4759 default:
4760 assert(0);
4761 }
4762
4763 // 0 = buffers, 1 = images, 2 = buffers and images
4764 size_t buffersVsImagesCount = 2;
4765 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4766 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4767 {
4768 std::string desc2 = desc1;
4769 switch(buffersVsImagesIndex)
4770 {
4771 case 0: desc2 += " Buffers"; break;
4772 case 1: desc2 += " Images"; break;
4773 case 2: desc2 += " Buffers+Images"; break;
4774 default: assert(0);
4775 }
4776
4777 // 0 = small, 1 = large, 2 = small and large
4778 size_t smallVsLargeCount = 2;
4779 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4780 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4781 {
4782 std::string desc3 = desc2;
4783 switch(smallVsLargeIndex)
4784 {
4785 case 0: desc3 += " Small"; break;
4786 case 1: desc3 += " Large"; break;
4787 case 2: desc3 += " Small+Large"; break;
4788 default: assert(0);
4789 }
4790
4791 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4792 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4793 else
4794 config.PoolSize = 4ull * 1024 * 1024;
4795
4796 // 0 = varying sizes min...max, 1 = set of constant sizes
4797 size_t constantSizesCount = 1;
4798 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4799 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4800 {
4801 std::string desc4 = desc3;
4802 switch(constantSizesIndex)
4803 {
4804 case 0: desc4 += " Varying_sizes"; break;
4805 case 1: desc4 += " Constant_sizes"; break;
4806 default: assert(0);
4807 }
4808
4809 config.AllocationSizes.clear();
4810 // Buffers present
4811 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4812 {
4813 // Small
4814 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4815 {
4816 // Varying size
4817 if(constantSizesIndex == 0)
4818 config.AllocationSizes.push_back({4, 16, 1024});
4819 // Constant sizes
4820 else
4821 {
4822 config.AllocationSizes.push_back({1, 16, 16});
4823 config.AllocationSizes.push_back({1, 64, 64});
4824 config.AllocationSizes.push_back({1, 256, 256});
4825 config.AllocationSizes.push_back({1, 1024, 1024});
4826 }
4827 }
4828 // Large
4829 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4830 {
4831 // Varying size
4832 if(constantSizesIndex == 0)
4833 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4834 // Constant sizes
4835 else
4836 {
4837 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4838 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4839 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4840 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4841 }
4842 }
4843 }
4844 // Images present
4845 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4846 {
4847 // Small
4848 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4849 {
4850 // Varying size
4851 if(constantSizesIndex == 0)
4852 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4853 // Constant sizes
4854 else
4855 {
4856 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4857 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4858 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4859 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4860 }
4861 }
4862 // Large
4863 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4864 {
4865 // Varying size
4866 if(constantSizesIndex == 0)
4867 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4868 // Constant sizes
4869 else
4870 {
4871 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4872 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4873 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4874 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4875 }
4876 }
4877 }
4878
4879 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4880 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4881
4882 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4883 size_t subscriptionModeCount;
4884 switch(ConfigType)
4885 {
4886 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4887 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4888 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4889 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4890 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4891 default: assert(0);
4892 }
4893 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4894 {
4895 std::string desc5 = desc4;
4896
4897 switch(subscriptionModeIndex)
4898 {
4899 case 0:
4900 desc5 += " Subscription_66%";
4901 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4902 break;
4903 case 1:
4904 desc5 += " Subscription_133%";
4905 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4906 break;
4907 case 2:
4908 desc5 += " Subscription_100%";
4909 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4910 break;
4911 case 3:
4912 desc5 += " Subscription_33%";
4913 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4914 break;
4915 case 4:
4916 desc5 += " Subscription_166%";
4917 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4918 break;
4919 default:
4920 assert(0);
4921 }
4922
4923 config.TotalItemCount = config.UsedItemCountMax * 5;
4924 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4925
4926 const char* testDescription = desc5.c_str();
4927
4928 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4929 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004930 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004931
4932 PoolTestResult result{};
4933 g_MemoryAliasingWarningEnabled = false;
4934 TestPool_Benchmark(result, config);
4935 g_MemoryAliasingWarningEnabled = true;
4936 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4937 }
4938 }
4939 }
4940 }
4941 }
4942 }
4943}
4944
Adam Sawickia83793a2018-09-03 13:40:42 +02004945static void BasicTestBuddyAllocator()
4946{
4947 wprintf(L"Basic test buddy allocator\n");
4948
4949 RandomNumberGenerator rand{76543};
4950
4951 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4952 sampleBufCreateInfo.size = 1024; // Whatever.
4953 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4954
4955 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4956 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4957
4958 VmaPoolCreateInfo poolCreateInfo = {};
4959 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004960 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004961
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004962 // Deliberately adding 1023 to test usable size smaller than memory block size.
4963 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004964 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004965 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004966
4967 VmaPool pool = nullptr;
4968 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004969 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004970
4971 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4972
4973 VmaAllocationCreateInfo allocCreateInfo = {};
4974 allocCreateInfo.pool = pool;
4975
4976 std::vector<BufferInfo> bufInfo;
4977 BufferInfo newBufInfo;
4978 VmaAllocationInfo allocInfo;
4979
4980 bufCreateInfo.size = 1024 * 256;
4981 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4982 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004983 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004984 bufInfo.push_back(newBufInfo);
4985
4986 bufCreateInfo.size = 1024 * 512;
4987 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4988 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004989 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004990 bufInfo.push_back(newBufInfo);
4991
4992 bufCreateInfo.size = 1024 * 128;
4993 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4994 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004995 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004996 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004997
4998 // Test very small allocation, smaller than minimum node size.
4999 bufCreateInfo.size = 1;
5000 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5001 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005002 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02005003 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02005004
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005005 // Test some small allocation with alignment requirement.
5006 {
5007 VkMemoryRequirements memReq;
5008 memReq.alignment = 256;
5009 memReq.memoryTypeBits = UINT32_MAX;
5010 memReq.size = 32;
5011
5012 newBufInfo.Buffer = VK_NULL_HANDLE;
5013 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5014 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005015 TEST(res == VK_SUCCESS);
5016 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005017 bufInfo.push_back(newBufInfo);
5018 }
5019
5020 //SaveAllocatorStatsToFile(L"TEST.json");
5021
Adam Sawicki21017c62018-09-07 15:26:59 +02005022 VmaPoolStats stats = {};
5023 vmaGetPoolStats(g_hAllocator, pool, &stats);
5024 int DBG = 0; // Set breakpoint here to inspect `stats`.
5025
Adam Sawicki80927152018-09-07 17:27:23 +02005026 // Allocate enough new buffers to surely fall into second block.
5027 for(uint32_t i = 0; i < 32; ++i)
5028 {
5029 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5030 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5031 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005032 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005033 bufInfo.push_back(newBufInfo);
5034 }
5035
5036 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5037
Adam Sawickia83793a2018-09-03 13:40:42 +02005038 // Destroy the buffers in random order.
5039 while(!bufInfo.empty())
5040 {
5041 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5042 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5043 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5044 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5045 }
5046
5047 vmaDestroyPool(g_hAllocator, pool);
5048}
5049
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005050static void BasicTestAllocatePages()
5051{
5052 wprintf(L"Basic test allocate pages\n");
5053
5054 RandomNumberGenerator rand{765461};
5055
5056 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5057 sampleBufCreateInfo.size = 1024; // Whatever.
5058 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5059
5060 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5061 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5062
5063 VmaPoolCreateInfo poolCreateInfo = {};
5064 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005065 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005066
5067 // 1 block of 1 MB.
5068 poolCreateInfo.blockSize = 1024 * 1024;
5069 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5070
5071 // Create pool.
5072 VmaPool pool = nullptr;
5073 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005074 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005075
5076 // Make 100 allocations of 4 KB - they should fit into the pool.
5077 VkMemoryRequirements memReq;
5078 memReq.memoryTypeBits = UINT32_MAX;
5079 memReq.alignment = 4 * 1024;
5080 memReq.size = 4 * 1024;
5081
5082 VmaAllocationCreateInfo allocCreateInfo = {};
5083 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5084 allocCreateInfo.pool = pool;
5085
5086 constexpr uint32_t allocCount = 100;
5087
5088 std::vector<VmaAllocation> alloc{allocCount};
5089 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5090 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005091 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005092 for(uint32_t i = 0; i < allocCount; ++i)
5093 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005094 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005095 allocInfo[i].pMappedData != nullptr &&
5096 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5097 allocInfo[i].memoryType == allocInfo[0].memoryType);
5098 }
5099
5100 // Free the allocations.
5101 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5102 std::fill(alloc.begin(), alloc.end(), nullptr);
5103 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5104
5105 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5106 // Also test optional allocationInfo = null.
5107 memReq.size = 100 * 1024;
5108 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005109 TEST(res != VK_SUCCESS);
5110 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005111
5112 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5113 memReq.size = 4 * 1024;
5114 memReq.alignment = 128 * 1024;
5115 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005116 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005117
5118 // Make 100 dedicated allocations of 4 KB.
5119 memReq.alignment = 4 * 1024;
5120 memReq.size = 4 * 1024;
5121
5122 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5123 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5124 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5125 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005126 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005127 for(uint32_t i = 0; i < allocCount; ++i)
5128 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005129 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005130 allocInfo[i].pMappedData != nullptr &&
5131 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5132 allocInfo[i].offset == 0);
5133 if(i > 0)
5134 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005135 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005136 }
5137 }
5138
5139 // Free the allocations.
5140 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5141 std::fill(alloc.begin(), alloc.end(), nullptr);
5142 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5143
5144 vmaDestroyPool(g_hAllocator, pool);
5145}
5146
Adam Sawickif2975342018-10-16 13:49:02 +02005147// Test the testing environment.
5148static void TestGpuData()
5149{
5150 RandomNumberGenerator rand = { 53434 };
5151
5152 std::vector<AllocInfo> allocInfo;
5153
5154 for(size_t i = 0; i < 100; ++i)
5155 {
5156 AllocInfo info = {};
5157
5158 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5159 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5160 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5161 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5162 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5163
5164 VmaAllocationCreateInfo allocCreateInfo = {};
5165 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5166
5167 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5168 TEST(res == VK_SUCCESS);
5169
5170 info.m_StartValue = rand.Generate();
5171
5172 allocInfo.push_back(std::move(info));
5173 }
5174
5175 UploadGpuData(allocInfo.data(), allocInfo.size());
5176
5177 ValidateGpuData(allocInfo.data(), allocInfo.size());
5178
5179 DestroyAllAllocations(allocInfo);
5180}
5181
Adam Sawickib8333fb2018-03-13 16:15:53 +01005182void Test()
5183{
5184 wprintf(L"TESTING:\n");
5185
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005186 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005187 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005188 ////////////////////////////////////////////////////////////////////////////////
5189 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005190 return;
5191 }
5192
Adam Sawickib8333fb2018-03-13 16:15:53 +01005193 // # Simple tests
5194
5195 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005196 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005197#if VMA_DEBUG_MARGIN
5198 TestDebugMargin();
5199#else
5200 TestPool_SameSize();
5201 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005202 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005203#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005204#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5205 TestAllocationsInitialization();
5206#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005207 TestMapping();
5208 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005209 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005210 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005211 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005212
Adam Sawicki4338f662018-09-07 14:12:37 +02005213 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005214 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005215
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005216 {
5217 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005218 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005219 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005220 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005221 fclose(file);
5222 }
5223
Adam Sawickib8333fb2018-03-13 16:15:53 +01005224 TestDefragmentationSimple();
5225 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005226 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005227 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005228
5229 // # Detailed tests
5230 FILE* file;
5231 fopen_s(&file, "Results.csv", "w");
5232 assert(file != NULL);
5233
5234 WriteMainTestResultHeader(file);
5235 PerformMainTests(file);
5236 //PerformCustomMainTest(file);
5237
5238 WritePoolTestResultHeader(file);
5239 PerformPoolTests(file);
5240 //PerformCustomPoolTest(file);
5241
5242 fclose(file);
5243
5244 wprintf(L"Done.\n");
5245}
5246
Adam Sawickif1a793c2018-03-13 15:42:22 +01005247#endif // #ifdef _WIN32