//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

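// RAII helper: measures the time between its construction and destruction and
// accumulates the result into the referenced min/sum/max duration counters.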
class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

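// Multi-threaded allocation stress test driven by `config`: each thread creates
// random buffers and images, frees them in the configured order, and allocation/
// deallocation timings plus memory statistics are accumulated into `outResult`.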
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate.
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources.
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

677{
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200678 VmaAllocation m_Allocation = VK_NULL_HANDLE;
679 VkBuffer m_Buffer = VK_NULL_HANDLE;
680 VkImage m_Image = VK_NULL_HANDLE;
681 uint32_t m_StartValue = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100682 union
683 {
684 VkBufferCreateInfo m_BufferInfo;
685 VkImageCreateInfo m_ImageInfo;
686 };
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200687
688 void CreateBuffer(
689 const VkBufferCreateInfo& bufCreateInfo,
690 const VmaAllocationCreateInfo& allocCreateInfo);
691 void Destroy();
Adam Sawickib8333fb2018-03-13 16:15:53 +0100692};
693
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200694void AllocInfo::CreateBuffer(
695 const VkBufferCreateInfo& bufCreateInfo,
696 const VmaAllocationCreateInfo& allocCreateInfo)
697{
698 m_BufferInfo = bufCreateInfo;
699 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
700 TEST(res == VK_SUCCESS);
701}
702
703void AllocInfo::Destroy()
704{
705 if(m_Image)
706 {
707 vkDestroyImage(g_hDevice, m_Image, nullptr);
708 }
709 if(m_Buffer)
710 {
711 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
712 }
713 if(m_Allocation)
714 {
715 vmaFreeMemory(g_hAllocator, m_Allocation);
716 }
717}
718
Adam Sawickif2975342018-10-16 13:49:02 +0200719class StagingBufferCollection
720{
721public:
722 StagingBufferCollection() { }
723 ~StagingBufferCollection();
724 // Returns false if maximum total size of buffers would be exceeded.
725 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
726 void ReleaseAllBuffers();
727
728private:
729 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
730 struct BufInfo
731 {
732 VmaAllocation Allocation = VK_NULL_HANDLE;
733 VkBuffer Buffer = VK_NULL_HANDLE;
734 VkDeviceSize Size = VK_WHOLE_SIZE;
735 void* MappedPtr = nullptr;
736 bool Used = false;
737 };
738 std::vector<BufInfo> m_Bufs;
739 // Including both used and unused.
740 VkDeviceSize m_TotalSize = 0;
741};
742
743StagingBufferCollection::~StagingBufferCollection()
744{
745 for(size_t i = m_Bufs.size(); i--; )
746 {
747 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
748 }
749}
750
751bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
752{
753 assert(size <= MAX_TOTAL_SIZE);
754
755 // Try to find existing unused buffer with best size.
756 size_t bestIndex = SIZE_MAX;
757 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
758 {
759 BufInfo& currBufInfo = m_Bufs[i];
760 if(!currBufInfo.Used && currBufInfo.Size >= size &&
761 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
762 {
763 bestIndex = i;
764 }
765 }
766
767 if(bestIndex != SIZE_MAX)
768 {
769 m_Bufs[bestIndex].Used = true;
770 outBuffer = m_Bufs[bestIndex].Buffer;
771 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
772 return true;
773 }
774
775 // Allocate new buffer with requested size.
776 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
777 {
778 BufInfo bufInfo;
779 bufInfo.Size = size;
780 bufInfo.Used = true;
781
782 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
783 bufCreateInfo.size = size;
784 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
785
786 VmaAllocationCreateInfo allocCreateInfo = {};
787 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
788 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
789
790 VmaAllocationInfo allocInfo;
791 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
792 bufInfo.MappedPtr = allocInfo.pMappedData;
793 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
794
795 outBuffer = bufInfo.Buffer;
796 outMappedPtr = bufInfo.MappedPtr;
797
798 m_Bufs.push_back(std::move(bufInfo));
799
800 m_TotalSize += size;
801
802 return true;
803 }
804
805 // There are some unused but smaller buffers: Free them and try again.
806 bool hasUnused = false;
807 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
808 {
809 if(!m_Bufs[i].Used)
810 {
811 hasUnused = true;
812 break;
813 }
814 }
815 if(hasUnused)
816 {
817 for(size_t i = m_Bufs.size(); i--; )
818 {
819 if(!m_Bufs[i].Used)
820 {
821 m_TotalSize -= m_Bufs[i].Size;
822 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
823 m_Bufs.erase(m_Bufs.begin() + i);
824 }
825 }
826
827 return AcquireBuffer(size, outBuffer, outMappedPtr);
828 }
829
830 return false;
831}
832
833void StagingBufferCollection::ReleaseAllBuffers()
834{
835 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
836 {
837 m_Bufs[i].Used = false;
838 }
839}
840
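// Fills the buffer of every given allocation with a sequence of uint32_t values
// starting at m_StartValue, going through CPU-visible staging buffers and
// vkCmdCopyBuffer commands recorded in the shared single-time command buffer.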
static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

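// Copies every allocation's buffer back into staging buffers and checks that it
// still contains the expected value sequence starting at m_StartValue.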
static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of an empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L"  Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of the allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, the second slightly bigger than the first. Delete the first. Then defragment.
    */
    if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN is disabled.
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
    const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
    const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
    const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
    const size_t bufCount = (size_t)(totalSize / bufSizeMin);
    const size_t percentToLeave = 30;
    const size_t percentNonMovable = 3;
    RandomNumberGenerator rand = { 234522 };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = 0;

    // Create all intended buffers.
    for(size_t i = 0; i < bufCount; ++i)
    {
        bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);

        if(rand.Generate() % 100 < percentNonMovable)
        {
            bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            allocCreateInfo.pUserData = (void*)(uintptr_t)2;
        }
        else
        {
            // Different usage just to see a different color in the output from VmaDumpVis.
            bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
                VK_BUFFER_USAGE_TRANSFER_DST_BIT |
                VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            // And in the JSON dump.
            allocCreateInfo.pUserData = (void*)(uintptr_t)1;
        }

        AllocInfo alloc;
        alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
        alloc.m_StartValue = rand.Generate();
        allocations.push_back(alloc);
    }

    // Destroy some percentage of them.
    {
        const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
        for(size_t i = 0; i < buffersToDestroy; ++i)
        {
            const size_t index = rand.Generate() % allocations.size();
            allocations[index].Destroy();
            allocations.erase(allocations.begin() + index);
        }
    }

    // Fill them with meaningful data.
    UploadGpuData(allocations.data(), allocations.size());

    wchar_t fileName[MAX_PATH];
    swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
    SaveAllocatorStatsToFile(fileName);

    // Defragment using GPU only.
    {
        const size_t allocCount = allocations.size();

        std::vector<VmaAllocation> allocationPtrs;
        std::vector<VkBool32> allocationChanged;
        std::vector<size_t> allocationOriginalIndex;

        for(size_t i = 0; i < allocCount; ++i)
        {
            VmaAllocationInfo allocInfo = {};
            vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
            if((uintptr_t)allocInfo.pUserData == 1) // Movable
            {
                allocationPtrs.push_back(allocations[i].m_Allocation);
                allocationChanged.push_back(VK_FALSE);
                allocationOriginalIndex.push_back(i);
            }
        }

        const size_t movableAllocCount = allocationPtrs.size();

        BeginSingleTimeCommands();

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.flags = 0;
        defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001698 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001699 defragInfo.pAllocationsChanged = allocationChanged.data();
1700 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001701 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1702 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1703
1704 VmaDefragmentationStats stats = {};
1705 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1706 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1707 TEST(res >= VK_SUCCESS);
1708
1709 EndSingleTimeCommands();
1710
1711 vmaDefragmentationEnd(g_hAllocator, ctx);
1712
Adam Sawickic6ede152018-11-16 17:04:14 +01001713 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001714 {
1715 if(allocationChanged[i])
1716 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001717 const size_t origAllocIndex = allocationOriginalIndex[i];
1718 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001719 }
1720 }
1721
Adam Sawicki440307e2018-10-18 15:05:19 +02001722 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1723 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001724 }
1725
1726 ValidateGpuData(allocations.data(), allocations.size());
1727
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001728 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001729 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001730
1731 // Destroy all remaining buffers.
1732 for(size_t i = allocations.size(); i--; )
1733 {
1734 allocations[i].Destroy();
1735 }
Adam Sawicki05704002018-11-08 16:07:29 +01001736
1737 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001738}
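
// Illustrative sketch, not called by the test suite: the same vmaDefragmentationBegin/End pair
// can also run a purely CPU-side pass by leaving commandBuffer as VK_NULL_HANDLE and filling
// the maxCpu* limits (assumed here as the CPU counterparts of the maxGpu* fields used above).
// Resources bound to allocations reported in pAllocationsChanged still have to be recreated,
// as RecreateAllocationResource() does above. The function name is a placeholder.
#if 0 // Example only - not compiled.
static void ExampleDefragmentCpu(std::vector<VmaAllocation>& allocs)
{
    std::vector<VkBool32> changed(allocs.size(), VK_FALSE);

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocs.size();
    defragInfo.pAllocations = allocs.data();
    defragInfo.pAllocationsChanged = changed.data();
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
    defragInfo.commandBuffer = VK_NULL_HANDLE; // No command buffer - CPU moves only.

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
    TEST(res >= VK_SUCCESS);
    vmaDefragmentationEnd(g_hAllocator, ctx);

    // Buffers/images bound to allocations marked in `changed` must now be recreated and rebound.
}
#endif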
1739
Adam Sawickib8333fb2018-03-13 16:15:53 +01001740static void TestUserData()
1741{
1742 VkResult res;
1743
1744 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1745 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1746 bufCreateInfo.size = 0x10000;
1747
1748 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1749 {
1750 // Opaque pointer
1751 {
1752
1753 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1754 void* pointerToSomething = &res;
1755
1756 VmaAllocationCreateInfo allocCreateInfo = {};
1757 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1758 allocCreateInfo.pUserData = numberAsPointer;
1759 if(testIndex == 1)
1760 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1761
1762 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1763 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001764 TEST(res == VK_SUCCESS);
1765            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001766
1767 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001768 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001769
1770 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1771 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001772 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1775 }
1776
1777 // String
1778 {
1779 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1780 const char* name2 = "2";
1781 const size_t name1Len = strlen(name1);
1782
1783 char* name1Buf = new char[name1Len + 1];
1784 strcpy_s(name1Buf, name1Len + 1, name1);
1785
1786 VmaAllocationCreateInfo allocCreateInfo = {};
1787 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1788 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1789 allocCreateInfo.pUserData = name1Buf;
1790 if(testIndex == 1)
1791 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1792
1793 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1794 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001795 TEST(res == VK_SUCCESS);
1796 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1797 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001798
1799 delete[] name1Buf;
1800
1801 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001802 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001803
1804 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1805 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001806 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001807
1808 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1809 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001810 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001811
1812 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1813 }
1814 }
1815}
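
// Illustrative sketch, not called by the test suite: a string set through
// VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT also appears next to the allocation in the
// JSON produced by vmaBuildStatsString(), which is the main practical use of the naming shown
// above. Buffer parameters and the function name are placeholders.
#if 0 // Example only - not compiled.
static void ExampleNamedAllocationInStatsDump()
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    bufCreateInfo.size = 0x10000;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = (void*)"Example index buffer";

    VkBuffer buf; VmaAllocation alloc;
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr) == VK_SUCCESS);

    char* statsStr = nullptr;
    vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
    // statsStr now contains "Example index buffer" in the entry for this allocation.
    vmaFreeStatsString(g_hAllocator, statsStr);

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}
#endif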
1816
Adam Sawicki370ab182018-11-08 16:31:00 +01001817static void TestInvalidAllocations()
1818{
1819 VkResult res;
1820
1821 VmaAllocationCreateInfo allocCreateInfo = {};
1822 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1823
1824 // Try to allocate 0 bytes.
1825 {
1826 VkMemoryRequirements memReq = {};
1827 memReq.size = 0; // !!!
1828 memReq.alignment = 4;
1829 memReq.memoryTypeBits = UINT32_MAX;
1830 VmaAllocation alloc = VK_NULL_HANDLE;
1831 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1832 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1833 }
1834
1835 // Try to create buffer with size = 0.
1836 {
1837 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1838 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1839 bufCreateInfo.size = 0; // !!!
1840 VkBuffer buf = VK_NULL_HANDLE;
1841 VmaAllocation alloc = VK_NULL_HANDLE;
1842 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1843 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1844 }
1845
1846 // Try to create image with one dimension = 0.
1847 {
1848        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1849 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1850 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1851 imageCreateInfo.extent.width = 128;
1852 imageCreateInfo.extent.height = 0; // !!!
1853 imageCreateInfo.extent.depth = 1;
1854 imageCreateInfo.mipLevels = 1;
1855 imageCreateInfo.arrayLayers = 1;
1856 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1857 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1858 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1859 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1860 VkImage image = VK_NULL_HANDLE;
1861 VmaAllocation alloc = VK_NULL_HANDLE;
1862 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1863 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1864 }
1865}
1866
Adam Sawickib8333fb2018-03-13 16:15:53 +01001867static void TestMemoryRequirements()
1868{
1869 VkResult res;
1870 VkBuffer buf;
1871 VmaAllocation alloc;
1872 VmaAllocationInfo allocInfo;
1873
1874 const VkPhysicalDeviceMemoryProperties* memProps;
1875 vmaGetMemoryProperties(g_hAllocator, &memProps);
1876
1877 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1878 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1879 bufInfo.size = 128;
1880
1881 VmaAllocationCreateInfo allocCreateInfo = {};
1882
1883 // No requirements.
1884 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001885 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001886 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1887
1888 // Usage.
1889 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1890 allocCreateInfo.requiredFlags = 0;
1891 allocCreateInfo.preferredFlags = 0;
1892 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1893
1894 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001895 TEST(res == VK_SUCCESS);
1896 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001897 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1898
1899 // Required flags, preferred flags.
1900 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1901 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1902 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1903 allocCreateInfo.memoryTypeBits = 0;
1904
1905 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001906 TEST(res == VK_SUCCESS);
1907 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1908 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001909 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1910
1911 // memoryTypeBits.
1912 const uint32_t memType = allocInfo.memoryType;
1913 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1914 allocCreateInfo.requiredFlags = 0;
1915 allocCreateInfo.preferredFlags = 0;
1916 allocCreateInfo.memoryTypeBits = 1u << memType;
1917
1918 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001919 TEST(res == VK_SUCCESS);
1920 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001921 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1922
1923}
1924
1925static void TestBasics()
1926{
1927 VkResult res;
1928
1929 TestMemoryRequirements();
1930
1931 // Lost allocation
1932 {
1933 VmaAllocation alloc = VK_NULL_HANDLE;
1934 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001935 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001936
1937 VmaAllocationInfo allocInfo;
1938 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001939 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1940 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001941
1942 vmaFreeMemory(g_hAllocator, alloc);
1943 }
1944
1945 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1946 {
1947 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1948 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1949 bufCreateInfo.size = 128;
1950
1951 VmaAllocationCreateInfo allocCreateInfo = {};
1952 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1953 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1954
1955 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1956 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001957 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001958
1959 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1960
1961        // Same with DEDICATED_MEMORY.
1962 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1963
1964 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001965 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001966
1967 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1968 }
1969
1970 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001971
1972 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001973}
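
// Illustrative sketch, not called by the test suite: what the MAPPED + GPU_ONLY case above
// means in practice. pMappedData is only non-null when the chosen memory type turned out to be
// HOST_VISIBLE, so it has to be checked before writing through it. Buffer parameters and the
// function name are placeholders.
#if 0 // Example only - not compiled.
static void ExamplePersistentlyMappedWrite()
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    bufCreateInfo.size = 65536;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo) == VK_SUCCESS);

    if(allocInfo.pMappedData != nullptr)
    {
        // Persistently mapped - no vmaMapMemory/vmaUnmapMemory needed.
        memset(allocInfo.pMappedData, 0, (size_t)bufCreateInfo.size);
    }

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}
#endif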
1974
1975void TestHeapSizeLimit()
1976{
1977 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1978 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1979
1980 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1981 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1982 {
1983 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1984 }
1985
1986 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1987 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1988 allocatorCreateInfo.device = g_hDevice;
1989 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1990
1991 VmaAllocator hAllocator;
1992 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001993 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001994
1995 struct Item
1996 {
1997 VkBuffer hBuf;
1998 VmaAllocation hAlloc;
1999 };
2000 std::vector<Item> items;
2001
2002 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2003 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2004
2005    // 1. Allocate two blocks of dedicated memory, each half the size of BLOCK_SIZE.
2006 VmaAllocationInfo ownAllocInfo;
2007 {
2008 VmaAllocationCreateInfo allocCreateInfo = {};
2009 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2010 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2011
2012 bufCreateInfo.size = BLOCK_SIZE / 2;
2013
2014 for(size_t i = 0; i < 2; ++i)
2015 {
2016 Item item;
2017 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002018 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002019 items.push_back(item);
2020 }
2021 }
2022
2023    // Create a pool to make sure further allocations are made from this same memory type.
2024 VmaPoolCreateInfo poolCreateInfo = {};
2025 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2026 poolCreateInfo.blockSize = BLOCK_SIZE;
2027
2028 VmaPool hPool;
2029 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002030 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002031
2032 // 2. Allocate normal buffers from all the remaining memory.
2033 {
2034 VmaAllocationCreateInfo allocCreateInfo = {};
2035 allocCreateInfo.pool = hPool;
2036
2037 bufCreateInfo.size = BLOCK_SIZE / 2;
2038
2039 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2040 for(size_t i = 0; i < bufCount; ++i)
2041 {
2042 Item item;
2043 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002044 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002045 items.push_back(item);
2046 }
2047 }
2048
2049 // 3. Allocation of one more (even small) buffer should fail.
2050 {
2051 VmaAllocationCreateInfo allocCreateInfo = {};
2052 allocCreateInfo.pool = hPool;
2053
2054 bufCreateInfo.size = 128;
2055
2056 VkBuffer hBuf;
2057 VmaAllocation hAlloc;
2058 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002059 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002060 }
2061
2062 // Destroy everything.
2063 for(size_t i = items.size(); i--; )
2064 {
2065 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2066 }
2067
2068 vmaDestroyPool(hAllocator, hPool);
2069
2070 vmaDestroyAllocator(hAllocator);
2071}
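
// Illustrative sketch, not called by the test suite: pHeapSizeLimit can also cap a single heap
// while leaving the others unlimited, assuming VK_WHOLE_SIZE is treated as "no limit" for a
// heap entry (as documented for VmaAllocatorCreateInfo::pHeapSizeLimit). The heap index and the
// 256 MB cap below are placeholders.
#if 0 // Example only - not compiled.
static void ExampleLimitSingleHeap()
{
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE; // No limit on this heap.
    heapSizeLimit[0] = 256ull * 1024 * 1024; // Cap heap 0 at 256 MB.

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator;
    TEST(vmaCreateAllocator(&allocatorCreateInfo, &hAllocator) == VK_SUCCESS);
    // ... allocations from heap 0 now fail with VK_ERROR_OUT_OF_DEVICE_MEMORY past 256 MB ...
    vmaDestroyAllocator(hAllocator);
}
#endif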
2072
Adam Sawicki212a4a62018-06-14 15:44:45 +02002073#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002074static void TestDebugMargin()
2075{
2076 if(VMA_DEBUG_MARGIN == 0)
2077 {
2078 return;
2079 }
2080
2081 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002082 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002083
2084 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002085 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002086
2087    // Create a few buffers of different sizes.
2088 const size_t BUF_COUNT = 10;
2089 BufferInfo buffers[BUF_COUNT];
2090 VmaAllocationInfo allocInfo[BUF_COUNT];
2091    for(size_t i = 0; i < BUF_COUNT; ++i)
2092 {
2093 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002094 // Last one will be mapped.
2095 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002096
2097 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002098 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002099 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002100 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101
2102 if(i == BUF_COUNT - 1)
2103 {
2104 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002106 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2107 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2108 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002109 }
2110
2111 // Check if their offsets preserve margin between them.
2112 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2113 {
2114 if(lhs.deviceMemory != rhs.deviceMemory)
2115 {
2116 return lhs.deviceMemory < rhs.deviceMemory;
2117 }
2118 return lhs.offset < rhs.offset;
2119 });
2120 for(size_t i = 1; i < BUF_COUNT; ++i)
2121 {
2122 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2123 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002124 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002125 }
2126 }
2127
Adam Sawicki212a4a62018-06-14 15:44:45 +02002128 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002129 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002130
Adam Sawicki73b16652018-06-11 16:39:25 +02002131 // Destroy all buffers.
2132 for(size_t i = BUF_COUNT; i--; )
2133 {
2134 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2135 }
2136}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002137#endif
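
// Illustrative sketch: TestDebugMargin() only does something when the margin is enabled where
// the library is configured, i.e. before #include "vk_mem_alloc.h" (VmaUsage.h/.cpp in this
// project), not here. One possible configuration, assuming the standard VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION configuration macros:
#if 0 // Example only - belongs in the file that configures the library.
#define VMA_DEBUG_MARGIN 16             // Reserve 16 B of margin before and after every allocation.
#define VMA_DEBUG_DETECT_CORRUPTION 1   // Fill margins with a magic value and let vmaCheckCorruption() validate it.
#include "vk_mem_alloc.h"

// Then, anywhere in the application:
//     VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX); // Check all memory types.
//     TEST(res == VK_SUCCESS);
#endif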
Adam Sawicki73b16652018-06-11 16:39:25 +02002138
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002139static void TestLinearAllocator()
2140{
2141 wprintf(L"Test linear allocator\n");
2142
2143 RandomNumberGenerator rand{645332};
2144
2145 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2146 sampleBufCreateInfo.size = 1024; // Whatever.
2147 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2148
2149 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2150 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2151
2152 VmaPoolCreateInfo poolCreateInfo = {};
2153 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002154 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002155
Adam Sawickiee082772018-06-20 17:45:49 +02002156 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002157 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2158 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2159
2160 VmaPool pool = nullptr;
2161 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002162 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002163
2164 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2165
2166 VmaAllocationCreateInfo allocCreateInfo = {};
2167 allocCreateInfo.pool = pool;
2168
2169 constexpr size_t maxBufCount = 100;
2170 std::vector<BufferInfo> bufInfo;
2171
2172 constexpr VkDeviceSize bufSizeMin = 16;
2173 constexpr VkDeviceSize bufSizeMax = 1024;
2174 VmaAllocationInfo allocInfo;
2175 VkDeviceSize prevOffset = 0;
2176
2177 // Test one-time free.
2178 for(size_t i = 0; i < 2; ++i)
2179 {
2180        // Allocate a number of buffers of varying size that surely fit into this block.
2181 VkDeviceSize bufSumSize = 0;
2182 for(size_t i = 0; i < maxBufCount; ++i)
2183 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002184 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002185 BufferInfo newBufInfo;
2186 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2187 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002188 TEST(res == VK_SUCCESS);
2189 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002190 bufInfo.push_back(newBufInfo);
2191 prevOffset = allocInfo.offset;
2192 bufSumSize += bufCreateInfo.size;
2193 }
2194
2195 // Validate pool stats.
2196 VmaPoolStats stats;
2197 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002198 TEST(stats.size == poolCreateInfo.blockSize);
2199        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2200 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002201
2202 // Destroy the buffers in random order.
2203 while(!bufInfo.empty())
2204 {
2205 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2206 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2207 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2208 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2209 }
2210 }
2211
2212 // Test stack.
2213 {
2214        // Allocate a number of buffers of varying size that surely fit into this block.
2215 for(size_t i = 0; i < maxBufCount; ++i)
2216 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002217 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002218 BufferInfo newBufInfo;
2219 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2220 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002221 TEST(res == VK_SUCCESS);
2222 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002223 bufInfo.push_back(newBufInfo);
2224 prevOffset = allocInfo.offset;
2225 }
2226
2227 // Destroy few buffers from top of the stack.
2228 for(size_t i = 0; i < maxBufCount / 5; ++i)
2229 {
2230 const BufferInfo& currBufInfo = bufInfo.back();
2231 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2232 bufInfo.pop_back();
2233 }
2234
2235 // Create some more
2236 for(size_t i = 0; i < maxBufCount / 5; ++i)
2237 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002238 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002239 BufferInfo newBufInfo;
2240 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2241 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002242 TEST(res == VK_SUCCESS);
2243 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002244 bufInfo.push_back(newBufInfo);
2245 prevOffset = allocInfo.offset;
2246 }
2247
2248 // Destroy the buffers in reverse order.
2249 while(!bufInfo.empty())
2250 {
2251 const BufferInfo& currBufInfo = bufInfo.back();
2252 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2253 bufInfo.pop_back();
2254 }
2255 }
2256
Adam Sawickiee082772018-06-20 17:45:49 +02002257 // Test ring buffer.
2258 {
2259        // Allocate a number of buffers that surely fit into this block.
2260 bufCreateInfo.size = bufSizeMax;
2261 for(size_t i = 0; i < maxBufCount; ++i)
2262 {
2263 BufferInfo newBufInfo;
2264 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2265 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002266 TEST(res == VK_SUCCESS);
2267 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002268 bufInfo.push_back(newBufInfo);
2269 prevOffset = allocInfo.offset;
2270 }
2271
2272 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
2273        // Free and allocate new buffers enough times to make sure we wrap around at least once.
2274 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2275 for(size_t iter = 0; iter < iterCount; ++iter)
2276 {
2277 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2278 {
2279 const BufferInfo& currBufInfo = bufInfo.front();
2280 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2281 bufInfo.erase(bufInfo.begin());
2282 }
2283 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2284 {
2285 BufferInfo newBufInfo;
2286 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2287 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002288 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002289 bufInfo.push_back(newBufInfo);
2290 }
2291 }
2292
2293 // Allocate buffers until we reach out-of-memory.
2294 uint32_t debugIndex = 0;
2295 while(res == VK_SUCCESS)
2296 {
2297 BufferInfo newBufInfo;
2298 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2299 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2300 if(res == VK_SUCCESS)
2301 {
2302 bufInfo.push_back(newBufInfo);
2303 }
2304 else
2305 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002306 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002307 }
2308 ++debugIndex;
2309 }
2310
2311 // Destroy the buffers in random order.
2312 while(!bufInfo.empty())
2313 {
2314 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2315 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2316 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2317 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2318 }
2319 }
2320
Adam Sawicki680b2252018-08-22 14:47:32 +02002321 // Test double stack.
2322 {
2323        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2324 VkDeviceSize prevOffsetLower = 0;
2325 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2326 for(size_t i = 0; i < maxBufCount; ++i)
2327 {
2328 const bool upperAddress = (i % 2) != 0;
2329 if(upperAddress)
2330 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2331 else
2332 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002333 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002334 BufferInfo newBufInfo;
2335 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2336 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002337 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002338 if(upperAddress)
2339 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002340 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 prevOffsetUpper = allocInfo.offset;
2342 }
2343 else
2344 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002345 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002346 prevOffsetLower = allocInfo.offset;
2347 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002348 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002349 bufInfo.push_back(newBufInfo);
2350 }
2351
2352 // Destroy few buffers from top of the stack.
2353 for(size_t i = 0; i < maxBufCount / 5; ++i)
2354 {
2355 const BufferInfo& currBufInfo = bufInfo.back();
2356 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2357 bufInfo.pop_back();
2358 }
2359
2360 // Create some more
2361 for(size_t i = 0; i < maxBufCount / 5; ++i)
2362 {
2363 const bool upperAddress = (i % 2) != 0;
2364 if(upperAddress)
2365 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2366 else
2367 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002368 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002369 BufferInfo newBufInfo;
2370 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2371 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002372 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002373 bufInfo.push_back(newBufInfo);
2374 }
2375
2376 // Destroy the buffers in reverse order.
2377 while(!bufInfo.empty())
2378 {
2379 const BufferInfo& currBufInfo = bufInfo.back();
2380 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2381 bufInfo.pop_back();
2382 }
2383
2384 // Create buffers on both sides until we reach out of memory.
2385 prevOffsetLower = 0;
2386 prevOffsetUpper = poolCreateInfo.blockSize;
2387 res = VK_SUCCESS;
2388 for(size_t i = 0; res == VK_SUCCESS; ++i)
2389 {
2390 const bool upperAddress = (i % 2) != 0;
2391 if(upperAddress)
2392 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2393 else
2394 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002395 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002396 BufferInfo newBufInfo;
2397 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2398 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2399 if(res == VK_SUCCESS)
2400 {
2401 if(upperAddress)
2402 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002403 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002404 prevOffsetUpper = allocInfo.offset;
2405 }
2406 else
2407 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002408 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002409 prevOffsetLower = allocInfo.offset;
2410 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002411 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002412 bufInfo.push_back(newBufInfo);
2413 }
2414 }
2415
2416 // Destroy the buffers in random order.
2417 while(!bufInfo.empty())
2418 {
2419 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2420 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2421 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2422 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2423 }
2424
2425 // Create buffers on upper side only, constant size, until we reach out of memory.
2426 prevOffsetUpper = poolCreateInfo.blockSize;
2427 res = VK_SUCCESS;
2428 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2429 bufCreateInfo.size = bufSizeMax;
2430 for(size_t i = 0; res == VK_SUCCESS; ++i)
2431 {
2432 BufferInfo newBufInfo;
2433 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2434 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2435 if(res == VK_SUCCESS)
2436 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002437 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002438 prevOffsetUpper = allocInfo.offset;
2439 bufInfo.push_back(newBufInfo);
2440 }
2441 }
2442
2443 // Destroy the buffers in reverse order.
2444 while(!bufInfo.empty())
2445 {
2446 const BufferInfo& currBufInfo = bufInfo.back();
2447 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2448 bufInfo.pop_back();
2449 }
2450 }
2451
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002452 // Test ring buffer with lost allocations.
2453 {
2454        // Allocate a number of buffers until the pool is full.
2455        // Note the CAN_BECOME_LOST flag and the call to vmaSetCurrentFrameIndex.
2456 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2457 res = VK_SUCCESS;
2458 for(size_t i = 0; res == VK_SUCCESS; ++i)
2459 {
2460 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2461
Adam Sawickifd366b62019-01-24 15:26:43 +01002462 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002463
2464 BufferInfo newBufInfo;
2465 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2466 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2467 if(res == VK_SUCCESS)
2468 bufInfo.push_back(newBufInfo);
2469 }
2470
2471 // Free first half of it.
2472 {
2473 const size_t buffersToDelete = bufInfo.size() / 2;
2474 for(size_t i = 0; i < buffersToDelete; ++i)
2475 {
2476 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2477 }
2478 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2479 }
2480
2481        // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002482        // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002483 res = VK_SUCCESS;
2484 for(size_t i = 0; res == VK_SUCCESS; ++i)
2485 {
2486 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2487
Adam Sawickifd366b62019-01-24 15:26:43 +01002488 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002489
2490 BufferInfo newBufInfo;
2491 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2492 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2493 if(res == VK_SUCCESS)
2494 bufInfo.push_back(newBufInfo);
2495 }
2496
2497 VkDeviceSize firstNewOffset;
2498 {
2499 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2500
2501 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2502 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2503 bufCreateInfo.size = bufSizeMax;
2504
2505 BufferInfo newBufInfo;
2506 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2507 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002508 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002509 bufInfo.push_back(newBufInfo);
2510 firstNewOffset = allocInfo.offset;
2511
2512 // Make sure at least one buffer from the beginning became lost.
2513 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002515 }
2516
Adam Sawickifd366b62019-01-24 15:26:43 +01002517#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002518 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2519 size_t newCount = 1;
2520 for(;;)
2521 {
2522 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2523
Adam Sawickifd366b62019-01-24 15:26:43 +01002524 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002525
2526 BufferInfo newBufInfo;
2527 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2528 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002529
Adam Sawickib8d34d52018-10-03 17:41:20 +02002530 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002531 bufInfo.push_back(newBufInfo);
2532 ++newCount;
2533 if(allocInfo.offset < firstNewOffset)
2534 break;
2535 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002536#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002537
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002538 // Delete buffers that are lost.
2539 for(size_t i = bufInfo.size(); i--; )
2540 {
2541 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2542 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2543 {
2544 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2545 bufInfo.erase(bufInfo.begin() + i);
2546 }
2547 }
2548
2549 // Test vmaMakePoolAllocationsLost
2550 {
2551 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2552
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002553 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002554 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002555 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002556
2557 size_t realLostAllocCount = 0;
2558 for(size_t i = 0; i < bufInfo.size(); ++i)
2559 {
2560 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2561 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2562 ++realLostAllocCount;
2563 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002564 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002565 }
2566
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002567 // Destroy all the buffers in forward order.
2568 for(size_t i = 0; i < bufInfo.size(); ++i)
2569 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2570 bufInfo.clear();
2571 }
2572
Adam Sawicki70a683e2018-08-24 15:36:32 +02002573 vmaDestroyPool(g_hAllocator, pool);
2574}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002575
Adam Sawicki70a683e2018-08-24 15:36:32 +02002576static void TestLinearAllocatorMultiBlock()
2577{
2578 wprintf(L"Test linear allocator multi block\n");
2579
2580 RandomNumberGenerator rand{345673};
2581
2582 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2583 sampleBufCreateInfo.size = 1024 * 1024;
2584 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2585
2586 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2587 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2588
2589 VmaPoolCreateInfo poolCreateInfo = {};
2590 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2591 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002592 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002593
2594 VmaPool pool = nullptr;
2595 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002596 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002597
2598 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2599
2600 VmaAllocationCreateInfo allocCreateInfo = {};
2601 allocCreateInfo.pool = pool;
2602
2603 std::vector<BufferInfo> bufInfo;
2604 VmaAllocationInfo allocInfo;
2605
2606 // Test one-time free.
2607 {
2608 // Allocate buffers until we move to a second block.
2609 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2610 for(uint32_t i = 0; ; ++i)
2611 {
2612 BufferInfo newBufInfo;
2613 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2614 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002615 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002616 bufInfo.push_back(newBufInfo);
2617 if(lastMem && allocInfo.deviceMemory != lastMem)
2618 {
2619 break;
2620 }
2621 lastMem = allocInfo.deviceMemory;
2622 }
2623
Adam Sawickib8d34d52018-10-03 17:41:20 +02002624 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002625
2626 // Make sure that pool has now two blocks.
2627 VmaPoolStats poolStats = {};
2628 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002629 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002630
2631 // Destroy all the buffers in random order.
2632 while(!bufInfo.empty())
2633 {
2634 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2635 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2636 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2637 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2638 }
2639
2640 // Make sure that pool has now at most one block.
2641 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002642 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002643 }
2644
2645 // Test stack.
2646 {
2647 // Allocate buffers until we move to a second block.
2648 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2649 for(uint32_t i = 0; ; ++i)
2650 {
2651 BufferInfo newBufInfo;
2652 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2653 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002654 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002655 bufInfo.push_back(newBufInfo);
2656 if(lastMem && allocInfo.deviceMemory != lastMem)
2657 {
2658 break;
2659 }
2660 lastMem = allocInfo.deviceMemory;
2661 }
2662
Adam Sawickib8d34d52018-10-03 17:41:20 +02002663 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002664
2665        // Add a few more buffers.
2666 for(uint32_t i = 0; i < 5; ++i)
2667 {
2668 BufferInfo newBufInfo;
2669 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2670 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002671 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002672 bufInfo.push_back(newBufInfo);
2673 }
2674
2675 // Make sure that pool has now two blocks.
2676 VmaPoolStats poolStats = {};
2677 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002678 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002679
2680 // Delete half of buffers, LIFO.
2681 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2682 {
2683 const BufferInfo& currBufInfo = bufInfo.back();
2684 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2685 bufInfo.pop_back();
2686 }
2687
2688 // Add one more buffer.
2689 BufferInfo newBufInfo;
2690 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2691 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002692 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002693 bufInfo.push_back(newBufInfo);
2694
2695 // Make sure that pool has now one block.
2696 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002697 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002698
2699 // Delete all the remaining buffers, LIFO.
2700 while(!bufInfo.empty())
2701 {
2702 const BufferInfo& currBufInfo = bufInfo.back();
2703 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2704 bufInfo.pop_back();
2705 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002706 }
2707
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002708 vmaDestroyPool(g_hAllocator, pool);
2709}
2710
Adam Sawickifd11d752018-08-22 15:02:10 +02002711static void ManuallyTestLinearAllocator()
2712{
2713 VmaStats origStats;
2714 vmaCalculateStats(g_hAllocator, &origStats);
2715
2716 wprintf(L"Manually test linear allocator\n");
2717
2718 RandomNumberGenerator rand{645332};
2719
2720 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2721 sampleBufCreateInfo.size = 1024; // Whatever.
2722 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2723
2724 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2725 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2726
2727 VmaPoolCreateInfo poolCreateInfo = {};
2728 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002729 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002730
2731 poolCreateInfo.blockSize = 10 * 1024;
2732 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2733 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2734
2735 VmaPool pool = nullptr;
2736 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002737 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002738
2739 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2740
2741 VmaAllocationCreateInfo allocCreateInfo = {};
2742 allocCreateInfo.pool = pool;
2743
2744 std::vector<BufferInfo> bufInfo;
2745 VmaAllocationInfo allocInfo;
2746 BufferInfo newBufInfo;
2747
2748 // Test double stack.
2749 {
2750 /*
2751 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2752 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2753
2754 Totally:
2755 1 block allocated
2756 10240 Vulkan bytes
2757 6 new allocations
2758 2256 bytes in allocations
2759 */
2760
2761 bufCreateInfo.size = 32;
2762 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2763 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002764 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002765 bufInfo.push_back(newBufInfo);
2766
2767 bufCreateInfo.size = 1024;
2768 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2769 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002770 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002771 bufInfo.push_back(newBufInfo);
2772
2773 bufCreateInfo.size = 32;
2774 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2775 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002776 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002777 bufInfo.push_back(newBufInfo);
2778
2779 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2780
2781 bufCreateInfo.size = 128;
2782 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2783 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002784 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002785 bufInfo.push_back(newBufInfo);
2786
2787 bufCreateInfo.size = 1024;
2788 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2789 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002790 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002791 bufInfo.push_back(newBufInfo);
2792
2793 bufCreateInfo.size = 16;
2794 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2795 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002796 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002797 bufInfo.push_back(newBufInfo);
2798
2799 VmaStats currStats;
2800 vmaCalculateStats(g_hAllocator, &currStats);
2801 VmaPoolStats poolStats;
2802 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2803
2804 char* statsStr = nullptr;
2805 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2806
2807 // PUT BREAKPOINT HERE TO CHECK.
2808 // Inspect: currStats versus origStats, poolStats, statsStr.
2809 int I = 0;
2810
2811 vmaFreeStatsString(g_hAllocator, statsStr);
2812
2813 // Destroy the buffers in reverse order.
2814 while(!bufInfo.empty())
2815 {
2816 const BufferInfo& currBufInfo = bufInfo.back();
2817 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2818 bufInfo.pop_back();
2819 }
2820 }
2821
2822 vmaDestroyPool(g_hAllocator, pool);
2823}
2824
Adam Sawicki80927152018-09-07 17:27:23 +02002825static void BenchmarkAlgorithmsCase(FILE* file,
2826 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002827 bool empty,
2828 VmaAllocationCreateFlags allocStrategy,
2829 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002830{
2831 RandomNumberGenerator rand{16223};
2832
2833 const VkDeviceSize bufSizeMin = 32;
2834 const VkDeviceSize bufSizeMax = 1024;
2835 const size_t maxBufCapacity = 10000;
2836 const uint32_t iterationCount = 10;
2837
2838 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2839 sampleBufCreateInfo.size = bufSizeMax;
2840 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2841
2842 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2843 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2844
2845 VmaPoolCreateInfo poolCreateInfo = {};
2846 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002847 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002848
2849 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002850 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002851 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2852
2853 VmaPool pool = nullptr;
2854 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002855 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002856
2857 // Buffer created just to get memory requirements. Never bound to any memory.
2858 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2859 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002860 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002861
2862 VkMemoryRequirements memReq = {};
2863 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2864
2865 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2866
2867 VmaAllocationCreateInfo allocCreateInfo = {};
2868 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002869 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002870
2871 VmaAllocation alloc;
2872 std::vector<VmaAllocation> baseAllocations;
2873
2874 if(!empty)
2875 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002876 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002877 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002878 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002879 {
2880 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2881 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002882 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002883 baseAllocations.push_back(alloc);
2884 totalSize += memReq.size;
2885 }
2886
2887 // Delete half of them, choose randomly.
2888 size_t allocsToDelete = baseAllocations.size() / 2;
2889 for(size_t i = 0; i < allocsToDelete; ++i)
2890 {
2891 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2892 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2893 baseAllocations.erase(baseAllocations.begin() + index);
2894 }
2895 }
2896
2897 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002898 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002899 std::vector<VmaAllocation> testAllocations;
2900 testAllocations.reserve(allocCount);
2901 duration allocTotalDuration = duration::zero();
2902 duration freeTotalDuration = duration::zero();
2903 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2904 {
2905 // Allocations
2906 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2907 for(size_t i = 0; i < allocCount; ++i)
2908 {
2909 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2910 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002911 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002912 testAllocations.push_back(alloc);
2913 }
2914 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2915
2916 // Deallocations
2917 switch(freeOrder)
2918 {
2919 case FREE_ORDER::FORWARD:
2920 // Leave testAllocations unchanged.
2921 break;
2922 case FREE_ORDER::BACKWARD:
2923 std::reverse(testAllocations.begin(), testAllocations.end());
2924 break;
2925 case FREE_ORDER::RANDOM:
2926 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2927 break;
2928 default: assert(0);
2929 }
2930
2931 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2932 for(size_t i = 0; i < allocCount; ++i)
2933 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2934 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2935
2936 testAllocations.clear();
2937 }
2938
2939 // Delete baseAllocations
2940 while(!baseAllocations.empty())
2941 {
2942 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2943 baseAllocations.pop_back();
2944 }
2945
2946 vmaDestroyPool(g_hAllocator, pool);
2947
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002948 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2949 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2950
Adam Sawicki80927152018-09-07 17:27:23 +02002951 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2952 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002953 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002954 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002955 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002956 allocTotalSeconds,
2957 freeTotalSeconds);
2958
2959 if(file)
2960 {
2961 std::string currTime;
2962 CurrentTimeToStr(currTime);
2963
Adam Sawicki80927152018-09-07 17:27:23 +02002964 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002965 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002966 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002967 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002968 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002969 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2970 allocTotalSeconds,
2971 freeTotalSeconds);
2972 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002973}
2974
Adam Sawicki80927152018-09-07 17:27:23 +02002975static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002976{
Adam Sawicki80927152018-09-07 17:27:23 +02002977 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002978
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002979 if(file)
2980 {
2981 fprintf(file,
2982 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002983 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002984 "Allocation time (s),Deallocation time (s)\n");
2985 }
2986
Adam Sawicki0a607132018-08-24 11:18:41 +02002987 uint32_t freeOrderCount = 1;
2988 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2989 freeOrderCount = 3;
2990 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2991 freeOrderCount = 2;
2992
2993 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002994 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002995
2996 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2997 {
2998 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2999 switch(freeOrderIndex)
3000 {
3001 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3002 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3003 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3004 default: assert(0);
3005 }
3006
3007 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3008 {
Adam Sawicki80927152018-09-07 17:27:23 +02003009 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003010 {
Adam Sawicki80927152018-09-07 17:27:23 +02003011 uint32_t algorithm = 0;
3012 switch(algorithmIndex)
3013 {
3014 case 0:
3015 break;
3016 case 1:
3017 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3018 break;
3019 case 2:
3020 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3021 break;
3022 default:
3023 assert(0);
3024 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003025
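                // Allocation strategy bits are varied only for the default algorithm;
                // the linear and buddy algorithms follow their own fixed placement rules,
                // so a single run each is enough here.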
Adam Sawicki80927152018-09-07 17:27:23 +02003026 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003027 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3028 {
3029                    VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003030 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003031 {
3032 switch(allocStrategyIndex)
3033 {
3034 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3035 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3036 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3037 default: assert(0);
3038 }
3039 }
3040
Adam Sawicki80927152018-09-07 17:27:23 +02003041 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003042 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003043 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003044 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003045 strategy,
3046 freeOrder); // freeOrder
3047 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003048 }
3049 }
3050 }
3051}
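// A minimal usage sketch (hypothetical: RunBenchmarkAlgorithmsExample and the CSV file name are
// illustrative, not part of this test suite). BenchmarkAlgorithms always prints to stdout and
// additionally appends CSV rows when a non-null FILE* is passed.
#if 0
static void RunBenchmarkAlgorithmsExample()
{
    FILE* file = nullptr;
    fopen_s(&file, "AlgorithmBenchmark.csv", "w"); // Windows CRT, like the rest of this _WIN32-only file.
    BenchmarkAlgorithms(file);
    if(file != nullptr)
        fclose(file);
}
#endif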
3052
Adam Sawickib8333fb2018-03-13 16:15:53 +01003053static void TestPool_SameSize()
3054{
3055 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3056 const size_t BUF_COUNT = 100;
3057 VkResult res;
3058
3059 RandomNumberGenerator rand{123};
3060
3061 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3062 bufferInfo.size = BUF_SIZE;
3063 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3064
3065 uint32_t memoryTypeBits = UINT32_MAX;
3066 {
3067 VkBuffer dummyBuffer;
3068 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003069 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003070
3071 VkMemoryRequirements memReq;
3072 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3073 memoryTypeBits = memReq.memoryTypeBits;
3074
3075 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3076 }
3077
3078 VmaAllocationCreateInfo poolAllocInfo = {};
3079 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3080 uint32_t memTypeIndex;
3081 res = vmaFindMemoryTypeIndex(
3082 g_hAllocator,
3083 memoryTypeBits,
3084 &poolAllocInfo,
3085 &memTypeIndex);
3086
3087 VmaPoolCreateInfo poolCreateInfo = {};
3088 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3089 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3090 poolCreateInfo.minBlockCount = 1;
3091 poolCreateInfo.maxBlockCount = 4;
3092 poolCreateInfo.frameInUseCount = 0;
3093
3094 VmaPool pool;
3095 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003096 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003097
3098 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3099
3100 VmaAllocationCreateInfo allocInfo = {};
3101 allocInfo.pool = pool;
3102 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3103 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
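    // CAN_BECOME_LOST: the allocation may be made lost if it was not touched within the last
    // frameInUseCount frames (0 for this pool, so anything not used in the current frame is eligible).
    // CAN_MAKE_OTHER_LOST: this allocation is allowed to make such allocations lost to find room.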
3104
3105 struct BufItem
3106 {
3107 VkBuffer Buf;
3108 VmaAllocation Alloc;
3109 };
3110 std::vector<BufItem> items;
3111
3112 // Fill entire pool.
3113 for(size_t i = 0; i < BUF_COUNT; ++i)
3114 {
3115 BufItem item;
3116 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003117 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003118 items.push_back(item);
3119 }
3120
3121 // Make sure that another allocation would fail.
3122 {
3123 BufItem item;
3124 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003125 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003126 }
3127
3128 // Validate that no buffer is lost. Also check that they are not mapped.
3129 for(size_t i = 0; i < items.size(); ++i)
3130 {
3131 VmaAllocationInfo allocInfo;
3132 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003133 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3134 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003135 }
3136
3137 // Free some percent of random items.
3138 {
3139 const size_t PERCENT_TO_FREE = 10;
3140 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3141 for(size_t i = 0; i < itemsToFree; ++i)
3142 {
3143 size_t index = (size_t)rand.Generate() % items.size();
3144 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3145 items.erase(items.begin() + index);
3146 }
3147 }
3148
3149 // Randomly allocate and free items.
3150 {
3151 const size_t OPERATION_COUNT = BUF_COUNT;
3152 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3153 {
3154 bool allocate = rand.Generate() % 2 != 0;
3155 if(allocate)
3156 {
3157 if(items.size() < BUF_COUNT)
3158 {
3159 BufItem item;
3160 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003161 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003162 items.push_back(item);
3163 }
3164 }
3165 else // Free
3166 {
3167 if(!items.empty())
3168 {
3169 size_t index = (size_t)rand.Generate() % items.size();
3170 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3171 items.erase(items.begin() + index);
3172 }
3173 }
3174 }
3175 }
3176
3177 // Allocate up to maximum.
3178 while(items.size() < BUF_COUNT)
3179 {
3180 BufItem item;
3181 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003182 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003183 items.push_back(item);
3184 }
3185
3186 // Validate that no buffer is lost.
3187 for(size_t i = 0; i < items.size(); ++i)
3188 {
3189 VmaAllocationInfo allocInfo;
3190 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003191 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003192 }
3193
3194 // Next frame.
3195 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3196
3197 // Allocate another BUF_COUNT buffers.
3198 for(size_t i = 0; i < BUF_COUNT; ++i)
3199 {
3200 BufItem item;
3201 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003202 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003203 items.push_back(item);
3204 }
3205
3206    // Make sure the first BUF_COUNT buffers are lost. Delete them.
3207 for(size_t i = 0; i < BUF_COUNT; ++i)
3208 {
3209 VmaAllocationInfo allocInfo;
3210 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003211 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003212 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3213 }
3214 items.erase(items.begin(), items.begin() + BUF_COUNT);
3215
3216 // Validate that no buffer is lost.
3217 for(size_t i = 0; i < items.size(); ++i)
3218 {
3219 VmaAllocationInfo allocInfo;
3220 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003221 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003222 }
3223
3224 // Free one item.
3225 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3226 items.pop_back();
3227
3228 // Validate statistics.
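    // Expected state: the pool still holds its 4 blocks of BUF_SIZE * BUF_COUNT / 4 each
    // (BUF_COUNT * BUF_SIZE total), and freeing exactly one buffer from a full pool leaves
    // exactly one unused range of size BUF_SIZE.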
3229 {
3230 VmaPoolStats poolStats = {};
3231 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003232 TEST(poolStats.allocationCount == items.size());
3233 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
3234 TEST(poolStats.unusedRangeCount == 1);
3235 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3236 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003237 }
3238
3239 // Free all remaining items.
3240 for(size_t i = items.size(); i--; )
3241 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3242 items.clear();
3243
3244 // Allocate maximum items again.
3245 for(size_t i = 0; i < BUF_COUNT; ++i)
3246 {
3247 BufItem item;
3248 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003249 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003250 items.push_back(item);
3251 }
3252
3253 // Delete every other item.
3254 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3255 {
3256 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3257 items.erase(items.begin() + i);
3258 }
3259
3260 // Defragment!
3261 {
3262 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3263 for(size_t i = 0; i < items.size(); ++i)
3264 allocationsToDefragment[i] = items[i].Alloc;
3265
3266 VmaDefragmentationStats defragmentationStats;
3267 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
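        // 100 buffers filled 4 blocks of 25 each; deleting every other one left 50, which fit
        // into 2 blocks after compaction, so the 2 emptied blocks can be freed.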
Adam Sawickib8d34d52018-10-03 17:41:20 +02003268 TEST(res == VK_SUCCESS);
3269 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003270 }
3271
3272 // Free all remaining items.
3273 for(size_t i = items.size(); i--; )
3274 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3275 items.clear();
3276
3277 ////////////////////////////////////////////////////////////////////////////////
3278 // Test for vmaMakePoolAllocationsLost
3279
3280 // Allocate 4 buffers on frame 10.
3281 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3282 for(size_t i = 0; i < 4; ++i)
3283 {
3284 BufItem item;
3285 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003286 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003287 items.push_back(item);
3288 }
3289
3290 // Touch first 2 of them on frame 11.
3291 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3292 for(size_t i = 0; i < 2; ++i)
3293 {
3294 VmaAllocationInfo allocInfo;
3295 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3296 }
3297
3298 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3299 size_t lostCount = 0xDEADC0DE;
3300 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003301 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003302
3303 // Make another call. Now 0 should be lost.
3304 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003305 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003306
3307 // Make another call, with null count. Should not crash.
3308 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3309
3310 // END: Free all remaining items.
3311 for(size_t i = items.size(); i--; )
3312 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3313
3314 items.clear();
3315
Adam Sawickid2924172018-06-11 12:48:46 +02003316 ////////////////////////////////////////////////////////////////////////////////
3317 // Test for allocation too large for pool
3318
3319 {
3320 VmaAllocationCreateInfo allocCreateInfo = {};
3321 allocCreateInfo.pool = pool;
3322
3323 VkMemoryRequirements memReq;
3324 memReq.memoryTypeBits = UINT32_MAX;
3325 memReq.alignment = 1;
3326 memReq.size = poolCreateInfo.blockSize + 4;
3327
3328 VmaAllocation alloc = nullptr;
3329 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003330 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003331 }
3332
Adam Sawickib8333fb2018-03-13 16:15:53 +01003333 vmaDestroyPool(g_hAllocator, pool);
3334}
3335
Adam Sawickib0c36362018-11-13 16:17:38 +01003336static void TestResize()
3337{
3338 wprintf(L"Testing vmaResizeAllocation...\n");
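    // As exercised below, vmaResizeAllocation only adjusts the allocation's size in place within
    // its block (the allocation never moves), so growing succeeds only when enough free space
    // follows it immediately, and a dedicated allocation can only be "resized" to its current size.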
3339
3340 const VkDeviceSize KILOBYTE = 1024ull;
3341 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3342
3343 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3344 bufCreateInfo.size = 2 * MEGABYTE;
3345 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3346
3347 VmaAllocationCreateInfo allocCreateInfo = {};
3348 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3349
3350 uint32_t memTypeIndex = UINT32_MAX;
3351 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3352
3353 VmaPoolCreateInfo poolCreateInfo = {};
3354 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3355 poolCreateInfo.blockSize = 8 * MEGABYTE;
3356 poolCreateInfo.minBlockCount = 1;
3357 poolCreateInfo.maxBlockCount = 1;
3358 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3359
3360 VmaPool pool;
3361 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3362
3363 allocCreateInfo.pool = pool;
3364
3365 // Fill 8 MB pool with 4 * 2 MB allocations.
3366 VmaAllocation allocs[4] = {};
3367
3368 VkMemoryRequirements memReq = {};
3369 memReq.memoryTypeBits = UINT32_MAX;
3370 memReq.alignment = 4;
3371 memReq.size = bufCreateInfo.size;
3372
3373 VmaAllocationInfo allocInfo = {};
3374
3375 for(uint32_t i = 0; i < 4; ++i)
3376 {
3377 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3378 }
3379
3380 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3381
3382 // Case: Resize to the same size always succeeds.
3383 {
3384 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3385        vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3386 TEST(allocInfo.size == 2ull * 1024 * 1024);
3387 }
3388
3389 // Case: Shrink allocation at the end.
3390 {
3391 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3392 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3393 TEST(allocInfo.size == 1ull * 1024 * 1024);
3394 }
3395
3396 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3397
3398 // Case: Shrink allocation before free space.
3399 {
3400 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3401 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3402 TEST(allocInfo.size == 512 * KILOBYTE);
3403 }
3404
3405 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3406
3407 // Case: Shrink allocation before next allocation.
3408 {
3409 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3410 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3411 TEST(allocInfo.size == 1 * MEGABYTE);
3412 }
3413
3414 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3415
3416 // Case: Grow allocation while there is even more space available.
3417 {
3418 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3419 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3420 TEST(allocInfo.size == 1 * MEGABYTE);
3421 }
3422
3423 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3424
3425 // Case: Grow allocation while there is exact amount of free space available.
3426 {
3427 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3428 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3429 TEST(allocInfo.size == 2 * MEGABYTE);
3430 }
3431
3432 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3433
3434 // Case: Fail to grow when there is not enough free space due to next allocation.
3435 {
3436 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3437 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3438 TEST(allocInfo.size == 2 * MEGABYTE);
3439 }
3440
3441 // Case: Fail to grow when there is not enough free space due to end of memory block.
3442 {
3443 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3444 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3445 TEST(allocInfo.size == 1 * MEGABYTE);
3446 }
3447
3448 for(uint32_t i = 4; i--; )
3449 {
3450 vmaFreeMemory(g_hAllocator, allocs[i]);
3451 }
3452
3453 vmaDestroyPool(g_hAllocator, pool);
3454
3455 // Test dedicated allocation
3456 {
3457 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3458 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3459 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3460
3461 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3462 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3463
3464 // Case: Resize to the same size always succeeds.
3465 {
3466 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3467 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3468 TEST(allocInfo.size == 2ull * 1024 * 1024);
3469 }
3470
3471 // Case: Shrinking fails.
3472 {
3473 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3474 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3475 TEST(allocInfo.size == 2ull * 1024 * 1024);
3476 }
3477
3478 // Case: Growing fails.
3479 {
3480 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3481 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3482 TEST(allocInfo.size == 2ull * 1024 * 1024);
3483 }
3484
3485 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3486 }
3487}
3488
Adam Sawickie44c6262018-06-15 14:30:39 +02003489static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3490{
3491 const uint8_t* pBytes = (const uint8_t*)pMemory;
3492 for(size_t i = 0; i < size; ++i)
3493 {
3494 if(pBytes[i] != pattern)
3495 {
3496 return false;
3497 }
3498 }
3499 return true;
3500}
3501
3502static void TestAllocationsInitialization()
3503{
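    // This test assumes the allocator was compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled,
    // so that newly created allocations are filled with 0xDC and freed ones with 0xEF, which are
    // the patterns validated below.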
3504 VkResult res;
3505
3506 const size_t BUF_SIZE = 1024;
3507
3508 // Create pool.
3509
3510 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3511 bufInfo.size = BUF_SIZE;
3512 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3513
3514 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3515 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3516
3517 VmaPoolCreateInfo poolCreateInfo = {};
3518 poolCreateInfo.blockSize = BUF_SIZE * 10;
3519 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3520 poolCreateInfo.maxBlockCount = 1;
3521 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003522 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003523
3524 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3525 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003526 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003527
3528 // Create one persistently mapped buffer to keep memory of this block mapped,
3529    // so that the pointer to mapped data will remain (more or less...) valid even
3530 // after destruction of other allocations.
3531
3532 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3533 VkBuffer firstBuf;
3534 VmaAllocation firstAlloc;
3535 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003536 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003537
3538 // Test buffers.
3539
3540 for(uint32_t i = 0; i < 2; ++i)
3541 {
3542 const bool persistentlyMapped = i == 0;
3543 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3544 VkBuffer buf;
3545 VmaAllocation alloc;
3546 VmaAllocationInfo allocInfo;
3547 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003548 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003549
3550 void* pMappedData;
3551 if(!persistentlyMapped)
3552 {
3553 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003554 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003555 }
3556 else
3557 {
3558 pMappedData = allocInfo.pMappedData;
3559 }
3560
3561 // Validate initialized content
3562 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003563 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003564
3565 if(!persistentlyMapped)
3566 {
3567 vmaUnmapMemory(g_hAllocator, alloc);
3568 }
3569
3570 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3571
3572 // Validate freed content
3573 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003574 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003575 }
3576
3577 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3578 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3579}
3580
Adam Sawickib8333fb2018-03-13 16:15:53 +01003581static void TestPool_Benchmark(
3582 PoolTestResult& outResult,
3583 const PoolTestConfig& config)
3584{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003585 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003586
3587 RandomNumberGenerator mainRand{config.RandSeed};
3588
3589 uint32_t allocationSizeProbabilitySum = std::accumulate(
3590 config.AllocationSizes.begin(),
3591 config.AllocationSizes.end(),
3592 0u,
3593 [](uint32_t sum, const AllocationSize& allocSize) {
3594 return sum + allocSize.Probability;
3595 });
3596
3597 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3598 bufferInfo.size = 256; // Whatever.
3599 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3600
3601 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3602 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3603 imageInfo.extent.width = 256; // Whatever.
3604 imageInfo.extent.height = 256; // Whatever.
3605 imageInfo.extent.depth = 1;
3606 imageInfo.mipLevels = 1;
3607 imageInfo.arrayLayers = 1;
3608 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3609 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3610 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3611 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3612 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3613
3614 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3615 {
3616 VkBuffer dummyBuffer;
3617 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003618 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003619
3620 VkMemoryRequirements memReq;
3621 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3622 bufferMemoryTypeBits = memReq.memoryTypeBits;
3623
3624 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3625 }
3626
3627 uint32_t imageMemoryTypeBits = UINT32_MAX;
3628 {
3629 VkImage dummyImage;
3630 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003631 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003632
3633 VkMemoryRequirements memReq;
3634 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3635 imageMemoryTypeBits = memReq.memoryTypeBits;
3636
3637 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3638 }
3639
3640 uint32_t memoryTypeBits = 0;
3641 if(config.UsesBuffers() && config.UsesImages())
3642 {
3643 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3644 if(memoryTypeBits == 0)
3645 {
3646 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3647 return;
3648 }
3649 }
3650 else if(config.UsesBuffers())
3651 memoryTypeBits = bufferMemoryTypeBits;
3652 else if(config.UsesImages())
3653 memoryTypeBits = imageMemoryTypeBits;
3654 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003655 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003656
3657 VmaPoolCreateInfo poolCreateInfo = {};
3658 poolCreateInfo.memoryTypeIndex = 0;
3659 poolCreateInfo.minBlockCount = 1;
3660 poolCreateInfo.maxBlockCount = 1;
3661 poolCreateInfo.blockSize = config.PoolSize;
3662 poolCreateInfo.frameInUseCount = 1;
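    // frameInUseCount = 1: allocations touched in the current or the previous frame count as
    // "in use" and cannot become lost.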
3663
3664 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3665 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3666 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3667
3668 VmaPool pool;
3669 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003670 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003671
3672 // Start time measurement - after creating pool and initializing data structures.
3673 time_point timeBeg = std::chrono::high_resolution_clock::now();
3674
3675 ////////////////////////////////////////////////////////////////////////////////
3676 // ThreadProc
3677 auto ThreadProc = [&](
3678 PoolTestThreadResult* outThreadResult,
3679 uint32_t randSeed,
3680 HANDLE frameStartEvent,
3681 HANDLE frameEndEvent) -> void
3682 {
3683 RandomNumberGenerator threadRand{randSeed};
3684
3685 outThreadResult->AllocationTimeMin = duration::max();
3686 outThreadResult->AllocationTimeSum = duration::zero();
3687 outThreadResult->AllocationTimeMax = duration::min();
3688 outThreadResult->DeallocationTimeMin = duration::max();
3689 outThreadResult->DeallocationTimeSum = duration::zero();
3690 outThreadResult->DeallocationTimeMax = duration::min();
3691 outThreadResult->AllocationCount = 0;
3692 outThreadResult->DeallocationCount = 0;
3693 outThreadResult->LostAllocationCount = 0;
3694 outThreadResult->LostAllocationTotalSize = 0;
3695 outThreadResult->FailedAllocationCount = 0;
3696 outThreadResult->FailedAllocationTotalSize = 0;
3697
3698 struct Item
3699 {
3700 VkDeviceSize BufferSize;
3701 VkExtent2D ImageSize;
3702 VkBuffer Buf;
3703 VkImage Image;
3704 VmaAllocation Alloc;
3705
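            // Rough bookkeeping estimate only: images are counted as width * height * 4 bytes
            // (R8G8B8A8), ignoring any driver alignment or padding.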
3706 VkDeviceSize CalcSizeBytes() const
3707 {
3708 return BufferSize +
3709 ImageSize.width * ImageSize.height * 4;
3710 }
3711 };
3712 std::vector<Item> unusedItems, usedItems;
3713
3714 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3715
3716 // Create all items - all unused, not yet allocated.
3717 for(size_t i = 0; i < threadTotalItemCount; ++i)
3718 {
3719 Item item = {};
3720
3721 uint32_t allocSizeIndex = 0;
3722 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3723 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3724 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3725
3726 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3727 if(allocSize.BufferSizeMax > 0)
3728 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003729 TEST(allocSize.BufferSizeMin > 0);
3730 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003731 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3732 item.BufferSize = allocSize.BufferSizeMin;
3733 else
3734 {
3735 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3736 item.BufferSize = item.BufferSize / 16 * 16;
3737 }
3738 }
3739 else
3740 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003741 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003742 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3743 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3744 else
3745 {
3746 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3747 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3748 }
3749 }
3750
3751 unusedItems.push_back(item);
3752 }
3753
3754 auto Allocate = [&](Item& item) -> VkResult
3755 {
3756 VmaAllocationCreateInfo allocCreateInfo = {};
3757 allocCreateInfo.pool = pool;
3758 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3759 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3760
3761 if(item.BufferSize)
3762 {
3763 bufferInfo.size = item.BufferSize;
3764 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3765 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3766 }
3767 else
3768 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003769 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003770
3771 imageInfo.extent.width = item.ImageSize.width;
3772 imageInfo.extent.height = item.ImageSize.height;
3773 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3774 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3775 }
3776 };
3777
3778 ////////////////////////////////////////////////////////////////////////////////
3779 // Frames
3780 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3781 {
3782 WaitForSingleObject(frameStartEvent, INFINITE);
3783
3784 // Always make some percent of used bufs unused, to choose different used ones.
3785 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3786 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3787 {
3788 size_t index = threadRand.Generate() % usedItems.size();
3789 unusedItems.push_back(usedItems[index]);
3790 usedItems.erase(usedItems.begin() + index);
3791 }
3792
3793 // Determine which bufs we want to use in this frame.
3794 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3795 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003796 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003797 // Move some used to unused.
3798 while(usedBufCount < usedItems.size())
3799 {
3800 size_t index = threadRand.Generate() % usedItems.size();
3801 unusedItems.push_back(usedItems[index]);
3802 usedItems.erase(usedItems.begin() + index);
3803 }
3804 // Move some unused to used.
3805 while(usedBufCount > usedItems.size())
3806 {
3807 size_t index = threadRand.Generate() % unusedItems.size();
3808 usedItems.push_back(unusedItems[index]);
3809 unusedItems.erase(unusedItems.begin() + index);
3810 }
3811
3812 uint32_t touchExistingCount = 0;
3813 uint32_t touchLostCount = 0;
3814 uint32_t createSucceededCount = 0;
3815 uint32_t createFailedCount = 0;
3816
3817 // Touch all used bufs. If not created or lost, allocate.
3818 for(size_t i = 0; i < usedItems.size(); ++i)
3819 {
3820 Item& item = usedItems[i];
3821 // Not yet created.
3822 if(item.Alloc == VK_NULL_HANDLE)
3823 {
3824 res = Allocate(item);
3825 ++outThreadResult->AllocationCount;
3826 if(res != VK_SUCCESS)
3827 {
3828 item.Alloc = VK_NULL_HANDLE;
3829 item.Buf = VK_NULL_HANDLE;
3830 ++outThreadResult->FailedAllocationCount;
3831 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3832 ++createFailedCount;
3833 }
3834 else
3835 ++createSucceededCount;
3836 }
3837 else
3838 {
3839 // Touch.
3840 VmaAllocationInfo allocInfo;
3841 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3842 // Lost.
3843 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3844 {
3845 ++touchLostCount;
3846
3847 // Destroy.
3848 {
3849 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3850 if(item.Buf)
3851 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3852 else
3853 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3854 ++outThreadResult->DeallocationCount;
3855 }
3856 item.Alloc = VK_NULL_HANDLE;
3857 item.Buf = VK_NULL_HANDLE;
3858
3859 ++outThreadResult->LostAllocationCount;
3860 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3861
3862 // Recreate.
3863 res = Allocate(item);
3864 ++outThreadResult->AllocationCount;
3865 // Creation failed.
3866 if(res != VK_SUCCESS)
3867 {
3868 ++outThreadResult->FailedAllocationCount;
3869 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3870 ++createFailedCount;
3871 }
3872 else
3873 ++createSucceededCount;
3874 }
3875 else
3876 ++touchExistingCount;
3877 }
3878 }
3879
3880 /*
3881 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3882 randSeed, frameIndex,
3883 touchExistingCount, touchLostCount,
3884 createSucceededCount, createFailedCount);
3885 */
3886
3887 SetEvent(frameEndEvent);
3888 }
3889
3890 // Free all remaining items.
3891 for(size_t i = usedItems.size(); i--; )
3892 {
3893 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3894 if(usedItems[i].Buf)
3895 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3896 else
3897 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3898 ++outThreadResult->DeallocationCount;
3899 }
3900 for(size_t i = unusedItems.size(); i--; )
3901 {
3902            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3903 if(unusedItems[i].Buf)
3904 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3905 else
3906 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3907 ++outThreadResult->DeallocationCount;
3908 }
3909 };
3910
3911 // Launch threads.
3912 uint32_t threadRandSeed = mainRand.Generate();
3913 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3914 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3915 std::vector<std::thread> bkgThreads;
3916 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3917 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3918 {
3919 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3920 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3921 bkgThreads.emplace_back(std::bind(
3922 ThreadProc,
3923 &threadResults[threadIndex],
3924 threadRandSeed + threadIndex,
3925 frameStartEvents[threadIndex],
3926 frameEndEvents[threadIndex]));
3927 }
3928
3929 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003930 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003931 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3932 {
3933 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3934 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3935 SetEvent(frameStartEvents[threadIndex]);
3936 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3937 }
3938
3939    // Wait for threads to finish.
3940 for(size_t i = 0; i < bkgThreads.size(); ++i)
3941 {
3942 bkgThreads[i].join();
3943 CloseHandle(frameEndEvents[i]);
3944 CloseHandle(frameStartEvents[i]);
3945 }
3946 bkgThreads.clear();
3947
3948 // Finish time measurement - before destroying pool.
3949 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3950
3951 vmaDestroyPool(g_hAllocator, pool);
3952
3953 outResult.AllocationTimeMin = duration::max();
3954 outResult.AllocationTimeAvg = duration::zero();
3955 outResult.AllocationTimeMax = duration::min();
3956 outResult.DeallocationTimeMin = duration::max();
3957 outResult.DeallocationTimeAvg = duration::zero();
3958 outResult.DeallocationTimeMax = duration::min();
3959 outResult.LostAllocationCount = 0;
3960 outResult.LostAllocationTotalSize = 0;
3961 outResult.FailedAllocationCount = 0;
3962 outResult.FailedAllocationTotalSize = 0;
3963 size_t allocationCount = 0;
3964 size_t deallocationCount = 0;
3965 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3966 {
3967 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3968 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3969 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3970 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3971 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3972 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3973 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3974 allocationCount += threadResult.AllocationCount;
3975 deallocationCount += threadResult.DeallocationCount;
3976 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3977 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3978 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3979 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3980 }
3981 if(allocationCount)
3982 outResult.AllocationTimeAvg /= allocationCount;
3983 if(deallocationCount)
3984 outResult.DeallocationTimeAvg /= deallocationCount;
3985}
3986
3987static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3988{
3989 if(ptr1 < ptr2)
3990 return ptr1 + size1 > ptr2;
3991 else if(ptr2 < ptr1)
3992 return ptr2 + size2 > ptr1;
3993 else
3994 return true;
3995}
3996
3997static void TestMapping()
3998{
3999 wprintf(L"Testing mapping...\n");
4000
4001 VkResult res;
4002 uint32_t memTypeIndex = UINT32_MAX;
4003
4004 enum TEST
4005 {
4006 TEST_NORMAL,
4007 TEST_POOL,
4008 TEST_DEDICATED,
4009 TEST_COUNT
4010 };
4011 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4012 {
4013 VmaPool pool = nullptr;
4014 if(testIndex == TEST_POOL)
4015 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004016 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004017 VmaPoolCreateInfo poolInfo = {};
4018 poolInfo.memoryTypeIndex = memTypeIndex;
4019 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004020 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004021 }
4022
4023 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4024 bufInfo.size = 0x10000;
4025 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4026
4027 VmaAllocationCreateInfo allocCreateInfo = {};
4028 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4029 allocCreateInfo.pool = pool;
4030 if(testIndex == TEST_DEDICATED)
4031 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4032
4033 VmaAllocationInfo allocInfo;
4034
4035 // Mapped manually
4036
4037 // Create 2 buffers.
4038 BufferInfo bufferInfos[3];
4039 for(size_t i = 0; i < 2; ++i)
4040 {
4041 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4042 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004043 TEST(res == VK_SUCCESS);
4044 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004045 memTypeIndex = allocInfo.memoryType;
4046 }
4047
4048 // Map buffer 0.
4049 char* data00 = nullptr;
4050 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004051 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004052 data00[0xFFFF] = data00[0];
4053
4054 // Map buffer 0 second time.
4055 char* data01 = nullptr;
4056 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004057 TEST(res == VK_SUCCESS && data01 == data00);
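        // vmaMapMemory is reference-counted: mapping the same allocation again returns the same
        // pointer, and it must be balanced by an equal number of vmaUnmapMemory calls (done below).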
Adam Sawickib8333fb2018-03-13 16:15:53 +01004058
4059 // Map buffer 1.
4060 char* data1 = nullptr;
4061 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004062 TEST(res == VK_SUCCESS && data1 != nullptr);
4063 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004064 data1[0xFFFF] = data1[0];
4065
4066 // Unmap buffer 0 two times.
4067 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4068 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4069 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004070 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004071
4072 // Unmap buffer 1.
4073 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4074 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004075 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004076
4077 // Create 3rd buffer - persistently mapped.
4078 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4079 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4080 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004081 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004082
4083 // Map buffer 2.
4084 char* data2 = nullptr;
4085 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004086 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004087 data2[0xFFFF] = data2[0];
4088
4089 // Unmap buffer 2.
4090 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4091 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004092 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004093
4094 // Destroy all buffers.
4095 for(size_t i = 3; i--; )
4096 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4097
4098 vmaDestroyPool(g_hAllocator, pool);
4099 }
4100}
4101
4102static void TestMappingMultithreaded()
4103{
4104 wprintf(L"Testing mapping multithreaded...\n");
4105
4106 static const uint32_t threadCount = 16;
4107 static const uint32_t bufferCount = 1024;
4108 static const uint32_t threadBufferCount = bufferCount / threadCount;
4109
4110 VkResult res;
4111 volatile uint32_t memTypeIndex = UINT32_MAX;
4112
4113 enum TEST
4114 {
4115 TEST_NORMAL,
4116 TEST_POOL,
4117 TEST_DEDICATED,
4118 TEST_COUNT
4119 };
4120 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4121 {
4122 VmaPool pool = nullptr;
4123 if(testIndex == TEST_POOL)
4124 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004125 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004126 VmaPoolCreateInfo poolInfo = {};
4127 poolInfo.memoryTypeIndex = memTypeIndex;
4128 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004129 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004130 }
4131
4132 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4133 bufCreateInfo.size = 0x10000;
4134 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4135
4136 VmaAllocationCreateInfo allocCreateInfo = {};
4137 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4138 allocCreateInfo.pool = pool;
4139 if(testIndex == TEST_DEDICATED)
4140 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4141
4142 std::thread threads[threadCount];
4143 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4144 {
4145 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4146 // ======== THREAD FUNCTION ========
4147
4148 RandomNumberGenerator rand{threadIndex};
4149
4150 enum class MODE
4151 {
4152 // Don't map this buffer at all.
4153 DONT_MAP,
4154 // Map and quickly unmap.
4155 MAP_FOR_MOMENT,
4156 // Map and unmap before destruction.
4157 MAP_FOR_LONGER,
4158 // Map two times. Quickly unmap, second unmap before destruction.
4159 MAP_TWO_TIMES,
4160 // Create this buffer as persistently mapped.
4161 PERSISTENTLY_MAPPED,
4162 COUNT
4163 };
4164 std::vector<BufferInfo> bufInfos{threadBufferCount};
4165 std::vector<MODE> bufModes{threadBufferCount};
4166
4167 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4168 {
4169 BufferInfo& bufInfo = bufInfos[bufferIndex];
4170 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4171 bufModes[bufferIndex] = mode;
4172
4173 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4174 if(mode == MODE::PERSISTENTLY_MAPPED)
4175 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4176
4177 VmaAllocationInfo allocInfo;
4178 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4179 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004180 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004181
4182 if(memTypeIndex == UINT32_MAX)
4183 memTypeIndex = allocInfo.memoryType;
4184
4185 char* data = nullptr;
4186
4187 if(mode == MODE::PERSISTENTLY_MAPPED)
4188 {
4189 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004190 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004191 }
4192 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4193 mode == MODE::MAP_TWO_TIMES)
4194 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004195 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004196 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004197 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004198
4199 if(mode == MODE::MAP_TWO_TIMES)
4200 {
4201 char* data2 = nullptr;
4202 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004203 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004204 }
4205 }
4206 else if(mode == MODE::DONT_MAP)
4207 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004208 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004209 }
4210 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004211 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004212
4213                // Test that reading and writing the beginning and end of mapped memory don't crash.
4214 if(data)
4215 data[0xFFFF] = data[0];
4216
4217 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4218 {
4219 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4220
4221 VmaAllocationInfo allocInfo;
4222 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4223 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004224 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004225 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004226 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004227 }
4228
4229 switch(rand.Generate() % 3)
4230 {
4231 case 0: Sleep(0); break; // Yield.
4232 case 1: Sleep(10); break; // 10 ms
4233 // default: No sleep.
4234 }
4235
4236                // Test that reading and writing the beginning and end of mapped memory don't crash.
4237 if(data)
4238 data[0xFFFF] = data[0];
4239 }
4240
4241 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4242 {
4243 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4244 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4245 {
4246 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4247
4248 VmaAllocationInfo allocInfo;
4249 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004250 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004251 }
4252
4253 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4254 }
4255 });
4256 }
4257
4258 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4259 threads[threadIndex].join();
4260
4261 vmaDestroyPool(g_hAllocator, pool);
4262 }
4263}
4264
4265static void WriteMainTestResultHeader(FILE* file)
4266{
4267 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004268 "Code,Time,"
4269 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004270 "Total Time (us),"
4271 "Allocation Time Min (us),"
4272 "Allocation Time Avg (us),"
4273 "Allocation Time Max (us),"
4274 "Deallocation Time Min (us),"
4275 "Deallocation Time Avg (us),"
4276 "Deallocation Time Max (us),"
4277 "Total Memory Allocated (B),"
4278 "Free Range Size Avg (B),"
4279 "Free Range Size Max (B)\n");
4280}
4281
4282static void WriteMainTestResult(
4283 FILE* file,
4284 const char* codeDescription,
4285 const char* testDescription,
4286 const Config& config, const Result& result)
4287{
4288 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4289 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4290 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4291 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4292 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4293 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4294 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4295
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004296 std::string currTime;
4297 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004298
4299 fprintf(file,
4300 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004301 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4302 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004303 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004304 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004305 totalTimeSeconds * 1e6f,
4306 allocationTimeMinSeconds * 1e6f,
4307 allocationTimeAvgSeconds * 1e6f,
4308 allocationTimeMaxSeconds * 1e6f,
4309 deallocationTimeMinSeconds * 1e6f,
4310 deallocationTimeAvgSeconds * 1e6f,
4311 deallocationTimeMaxSeconds * 1e6f,
4312 result.TotalMemoryAllocated,
4313 result.FreeRangeSizeAvg,
4314 result.FreeRangeSizeMax);
4315}
4316
4317static void WritePoolTestResultHeader(FILE* file)
4318{
4319 fprintf(file,
4320 "Code,Test,Time,"
4321 "Config,"
4322 "Total Time (us),"
4323 "Allocation Time Min (us),"
4324 "Allocation Time Avg (us),"
4325 "Allocation Time Max (us),"
4326 "Deallocation Time Min (us),"
4327 "Deallocation Time Avg (us),"
4328 "Deallocation Time Max (us),"
4329 "Lost Allocation Count,"
4330 "Lost Allocation Total Size (B),"
4331 "Failed Allocation Count,"
4332 "Failed Allocation Total Size (B)\n");
4333}
4334
static void WritePoolTestResult(
    FILE* file,
    const char* codeDescription,
    const char* testDescription,
    const PoolTestConfig& config,
    const PoolTestResult& result)
{
    float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
    float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
    float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
    float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
    float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
    float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
    float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);

    std::string currTime;
    CurrentTimeToStr(currTime);

    fprintf(file,
        "%s,%s,%s,"
        "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
        "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
        // General
        codeDescription,
        testDescription,
        currTime.c_str(),
        // Config
        config.ThreadCount,
        (unsigned long long)config.PoolSize,
        config.FrameCount,
        config.TotalItemCount,
        config.UsedItemCountMin,
        config.UsedItemCountMax,
        config.ItemsToMakeUnusedPercent,
        // Results
        totalTimeSeconds * 1e6f,
        allocationTimeMinSeconds * 1e6f,
        allocationTimeAvgSeconds * 1e6f,
        allocationTimeMaxSeconds * 1e6f,
        deallocationTimeMinSeconds * 1e6f,
        deallocationTimeAvgSeconds * 1e6f,
        deallocationTimeMaxSeconds * 1e6f,
        result.LostAllocationCount,
        result.LostAllocationTotalSize,
        result.FailedAllocationCount,
        result.FailedAllocationTotalSize);
}

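// Runs MainTest once with a single, hand-tuned configuration and appends the result to the CSV file.
// Intended for quick one-off experiments instead of the full test matrix.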
static void PerformCustomMainTest(FILE* file)
{
    Config config{};
    config.RandSeed = 65735476;
    //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;
    config.ThreadCount = 16;
    config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
    config.AllocationStrategy = 0;

    // Buffers
    //config.AllocationSizes.push_back({4, 16, 1024});
    config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB

    // Images
    //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
    //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});

    config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
    config.AdditionalOperationCount = 1024;

    Result result{};
    VkResult res = MainTest(result, config);
    TEST(res == VK_SUCCESS);
    WriteMainTestResult(file, "Foo", "CustomTest", config, result);
}

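// Runs TestPool_Benchmark once with a single, hand-tuned pool configuration and appends the result
// to the CSV file.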
static void PerformCustomPoolTest(FILE* file)
{
    PoolTestConfig config;
    config.PoolSize = 100 * 1024 * 1024;
    config.RandSeed = 2345764;
    config.ThreadCount = 1;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    AllocationSize allocSize = {};
    allocSize.BufferSizeMin = 1024;
    allocSize.BufferSizeMax = 1024 * 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    allocSize.BufferSizeMin = 0;
    allocSize.BufferSizeMax = 0;
    allocSize.ImageSizeMin = 128;
    allocSize.ImageSizeMax = 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    config.PoolSize = config.CalcAvgResourceSize() * 200;
    config.UsedItemCountMax = 160;
    config.TotalItemCount = config.UsedItemCountMax * 10;
    config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

    g_MemoryAliasingWarningEnabled = false;
    PoolTestResult result = {};
    TestPool_Benchmark(result, config);
    g_MemoryAliasingWarningEnabled = true;

    WritePoolTestResult(file, "Code desc", "Test desc", config, result);
}

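// Runs the full main benchmark matrix: every combination of thread count, buffers vs. images,
// small vs. large sizes, varying vs. constant sizes, initial allocation percentage, and allocation
// strategy (the number of combinations scales with ConfigType), writing one CSV row per run.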
static void PerformMainTests(FILE* file)
{
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    const size_t strategyCount = GetAllocationStrategyCount();

    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Initial allocation amount: 0 = 100% of MaxBytesToAllocate with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations.
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}

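// Runs the custom pool benchmark matrix: every combination of thread count, buffers vs. images,
// small vs. large sizes, varying vs. constant sizes, and pool subscription level (the number of
// combinations scales with ConfigType), writing one CSV row per run.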
static void PerformPoolTests(FILE* file)
{
    const size_t AVG_RESOURCES_PER_POOL = 300;

    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            g_MemoryAliasingWarningEnabled = false;
                            TestPool_Benchmark(result, config);
                            g_MemoryAliasingWarningEnabled = true;
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}

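// Basic smoke test of a custom pool using the buddy algorithm (VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT):
// creates buffers of various sizes, including one smaller than the minimum node size and one with an
// explicit alignment requirement, then destroys everything in random order.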
static void BasicTestBuddyAllocator()
{
    wprintf(L"Basic test buddy allocator\n");

    RandomNumberGenerator rand{76543};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Deliberately add 1023 so the usable size is smaller than the memory block size, to test that case.
    poolCreateInfo.blockSize = 1024 * 1024 + 1023;
    poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
    //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    BufferInfo newBufInfo;
    VmaAllocationInfo allocInfo;

    bufCreateInfo.size = 1024 * 256;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 512;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 128;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test very small allocation, smaller than minimum node size.
    bufCreateInfo.size = 1;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test a small allocation with an explicit alignment requirement.
    {
        VkMemoryRequirements memReq;
        memReq.alignment = 256;
        memReq.memoryTypeBits = UINT32_MAX;
        memReq.size = 32;

        newBufInfo.Buffer = VK_NULL_HANDLE;
        res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
            &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        TEST(allocInfo.offset % memReq.alignment == 0);
        bufInfo.push_back(newBufInfo);
    }

    //SaveAllocatorStatsToFile(L"TEST.json");

    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    int DBG = 0; // Set breakpoint here to inspect `stats`.

    // Allocate enough new buffers to be sure to spill over into a second memory block.
    for(uint32_t i = 0; i < 32; ++i)
    {
        bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);
    }

    SaveAllocatorStatsToFile(L"BuddyTest01.json");

    // Destroy the buffers in random order.
    while(!bufInfo.empty())
    {
        const size_t indexToDestroy = rand.Generate() % bufInfo.size();
        const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
        vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
        bufInfo.erase(bufInfo.begin() + indexToDestroy);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

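// Basic test of vmaAllocateMemoryPages / vmaFreeMemoryPages: a batch that fits in a single block,
// batches that are expected to fail (too large, alignment too strict), and a batch of dedicated
// allocations.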
static void BasicTestAllocatePages()
{
    wprintf(L"Basic test allocate pages\n");

    RandomNumberGenerator rand{765461};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // 1 block of 1 MB.
    poolCreateInfo.blockSize = 1024 * 1024;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    // Create pool.
    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Make 100 allocations of 4 KB - they should fit into the pool.
    VkMemoryRequirements memReq;
    memReq.memoryTypeBits = UINT32_MAX;
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    allocCreateInfo.pool = pool;

    constexpr uint32_t allocCount = 100;

    std::vector<VmaAllocation> alloc{allocCount};
    std::vector<VmaAllocationInfo> allocInfo{allocCount};
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
            allocInfo[i].memoryType == allocInfo[0].memoryType);
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    // Try to make 100 allocations of 100 KB each. This call should fail because the pool does not have enough memory.
    // Also test optional allocationInfo = null.
    memReq.size = 100 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
    TEST(res != VK_SUCCESS);
    TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());

    // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
    memReq.size = 4 * 1024;
    memReq.alignment = 128 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res != VK_SUCCESS);

    // Make 100 dedicated allocations of 4 KB.
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
    dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].memoryType == allocInfo[0].memoryType &&
            allocInfo[i].offset == 0);
        if(i > 0)
        {
            TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
        }
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    vmaDestroyPool(g_hAllocator, pool);
}

// Test the testing environment.
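// Creates GPU-only buffers filled with known values via UploadGpuData, reads them back with
// ValidateGpuData, and destroys them; it exercises the test framework itself rather than the allocator.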
static void TestGpuData()
{
    RandomNumberGenerator rand = { 53434 };

    std::vector<AllocInfo> allocInfo;

    for(size_t i = 0; i < 100; ++i)
    {
        AllocInfo info = {};

        info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
            VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
        info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

        VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
        TEST(res == VK_SUCCESS);

        info.m_StartValue = rand.Generate();

        allocInfo.push_back(std::move(info));
    }

    UploadGpuData(allocInfo.data(), allocInfo.size());

    ValidateGpuData(allocInfo.data(), allocInfo.size());

    DestroyAllAllocations(allocInfo);
}

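// Entry point of the test suite: runs the simple functional tests first, then the detailed
// benchmarks, writing their results to Algorithms.csv and Results.csv.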
void Test()
{
    wprintf(L"TESTING:\n");

    if(false)
    {
        ////////////////////////////////////////////////////////////////////////////////
        // Temporarily insert custom tests here:

        return;
    }

    // # Simple tests

    TestBasics();
    //TestGpuData(); // Not calling this because it's just testing the testing environment.
#if VMA_DEBUG_MARGIN
    TestDebugMargin();
#else
    TestPool_SameSize();
    TestHeapSizeLimit();
    TestResize();
#endif
#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
    TestAllocationsInitialization();
#endif
    TestMapping();
    TestMappingMultithreaded();
    TestLinearAllocator();
    ManuallyTestLinearAllocator();
    TestLinearAllocatorMultiBlock();

    BasicTestBuddyAllocator();
    BasicTestAllocatePages();

    {
        FILE* file;
        fopen_s(&file, "Algorithms.csv", "w");
        assert(file != NULL);
        BenchmarkAlgorithms(file);
        fclose(file);
    }

    TestDefragmentationSimple();
    TestDefragmentationFull();
    TestDefragmentationWholePool();
    TestDefragmentationGpu();

    // # Detailed tests
    FILE* file;
    fopen_s(&file, "Results.csv", "w");
    assert(file != NULL);

    WriteMainTestResultHeader(file);
    PerformMainTests(file);
    //PerformCustomMainTest(file);

    WritePoolTestResultHeader(file);
    PerformPoolTests(file);
    //PerformCustomPoolTest(file);

    fclose(file);

    wprintf(L"Done.\n");
}

#endif // #ifdef _WIN32