//
// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }
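    // Illustrative example of the weighted average above (hypothetical numbers,
    // not taken from any real test config): with entries
    //   { Probability = 3, BufferSizeMin = 16384, BufferSizeMax = 49152, 0, 0 } and
    //   { Probability = 1, 0, 0, ImageSizeMin = 256, ImageSizeMax = 512 },
    // buffers contribute (16384 + 49152) / 2 * 3 = 98304 and images contribute
    // 384 * 384 * 4 * 1 = 589824 (avgDimension^2 * 4 bytes for RGBA8), so
    // CalcAvgResourceSize() = (98304 + 589824) / (3 + 1) = 172032 bytes.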

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL: strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE: strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};
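// Minimal usage sketch of TimeRegisterObj (illustrative only, mirroring how the
// wrapper classes below use it): time a scope via RAII and accumulate min/sum/max
// into a Result that was prepared with InitResult().
//
//     Result result;
//     InitResult(result);
//     {
//         AllocationTimeRegisterObj timing{result}; // starts the clock
//         // ... code to measure, e.g. a vmaCreateBuffer call ...
//     } // destructor folds the elapsed time into result.AllocationTime* fields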

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];
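        // The loop above is a weighted (roulette-wheel) pick: memUsageRand starts as a
        // random value below the probability sum, and each bucket's weight is subtracted
        // until the value fits, so memUsageIndex is chosen with probability proportional
        // to MemUsageProbability[memUsageIndex].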

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
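// Hypothetical invocation of MainTest() (sketch only; the real configurations are
// built by the benchmark driver elsewhere, and these field values are made up):
//
//     Config config{};
//     config.RandSeed = 123;
//     config.ThreadCount = 4;
//     config.BeginBytesToAllocate = 64ull * 1024 * 1024;
//     config.MaxBytesToAllocate = 256ull * 1024 * 1024;
//     config.AdditionalOperationCount = 1024;
//     config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY only
//     config.AllocationSizes.push_back({1, 16, 1024 * 1024, 0, 0}); // buffers 16 B .. 1 MB
//     config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
//     config.FreeOrder = FREE_ORDER::RANDOM;
//     Result result{};
//     VkResult res = MainTest(result, config);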

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        vkDestroyImage(g_hDevice, m_Image, nullptr);
    }
    if(m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
    }
}

class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
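// Typical use of StagingBufferCollection, as done by UploadGpuData()/ValidateGpuData()
// below (sketch only): acquire a persistently mapped staging buffer per transfer, and
// when AcquireBuffer() returns false, submit the pending commands, release all staging
// buffers, and retry.
//
//     StagingBufferCollection stagingBufs;
//     VkBuffer stagingBuf = VK_NULL_HANDLE;
//     void* mappedPtr = nullptr;
//     if(!stagingBufs.AcquireBuffer(size, stagingBuf, mappedPtr))
//     {
//         EndSingleTimeCommands();          // flush work that still references the buffers
//         stagingBufs.ReleaseAllBuffers();  // mark them all reusable
//         stagingBufs.AcquireBuffer(size, stagingBuf, mappedPtr); // expected to succeed now
//     }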

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There may be unused buffers that are just too small: free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}

static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }
            // Issue copy command from destination buffer to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
        TEST(res == VK_SUCCESS);
    }
}

static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}
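// Note: Defragment() above wraps the older vmaDefragment() entry point. Allocations whose
// memory was moved are reported through allocChanged[], and their VkBuffer/VkImage must be
// recreated and rebound to the new memory, which RecreateAllocationResource() handles.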

static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
{
    std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
        ValidateAllocationData(allocInfo);
    });
}

void TestDefragmentationSimple()
{
    wprintf(L"Test defragmentation simple\n");

    RandomNumberGenerator rand(667);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    const VkDeviceSize MIN_BUF_SIZE = 32;
    const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
    auto RandomBufSize = [&]() -> VkDeviceSize {
        return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
    };

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaPool pool;
    ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

    // Defragmentation of empty pool.
    {
        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.poolCount = 1;
        defragInfo.pPools = &pool;

        VmaDefragmentationStats defragStats = {};
        VmaDefragmentationContext defragCtx = nullptr;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);
        TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
            defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
    }

    std::vector<AllocInfo> allocations;

    // persistentlyMappedOption = 0 - not persistently mapped.
    // persistentlyMappedOption = 1 - persistently mapped.
    for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
    {
        wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
        const bool persistentlyMapped = persistentlyMappedOption != 0;

        // # Test 1
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment everything.
        // Expected result: at least 1 block freed.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationStats defragStats;
            Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
            TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            TEST(defragStats.deviceMemoryBlocksFreed >= 1);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 2
        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
        // Expected result: Each of 4 iterations makes some progress.
        {
            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
            {
                AllocInfo allocInfo;
                CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            for(size_t i = 1; i < allocations.size(); ++i)
            {
                DestroyAllocation(allocations[i]);
                allocations.erase(allocations.begin() + i);
            }

            VmaDefragmentationInfo defragInfo = {};
            defragInfo.maxAllocationsToMove = 1;
            defragInfo.maxBytesToMove = BUF_SIZE;

            for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
            {
                VmaDefragmentationStats defragStats;
                Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
                TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
            }

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }

        // # Test 3
        // Buffers of variable size.
        // Create a number of buffers. Remove some percent of them.
        // Defragment while having some percent of them unmovable.
        // Expected result: Just simple validation.
        {
            for(size_t i = 0; i < 100; ++i)
            {
                VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
                localBufCreateInfo.size = RandomBufSize();

                AllocInfo allocInfo;
                CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
                allocations.push_back(allocInfo);
            }

            const uint32_t percentToDelete = 60;
            const size_t numberToDelete = allocations.size() * percentToDelete / 100;
            for(size_t i = 0; i < numberToDelete; ++i)
            {
                size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
                DestroyAllocation(allocations[indexToDelete]);
                allocations.erase(allocations.begin() + indexToDelete);
            }

            // Non-movable allocations will be at the beginning of the allocations array.
            const uint32_t percentNonMovable = 20;
            const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
            for(size_t i = 0; i < numberNonMovable; ++i)
            {
                size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
                if(indexNonMovable != i)
                    std::swap(allocations[i], allocations[indexNonMovable]);
            }

            VmaDefragmentationStats defragStats;
            Defragment(
                allocations.data() + numberNonMovable,
                allocations.size() - numberNonMovable,
                nullptr, &defragStats);

            ValidateAllocationsData(allocations.data(), allocations.size());

            DestroyAllAllocations(allocations);
        }
    }

    /*
    Allocation that must be moved to an overlapping place using memmove().
    Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
    */
    if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when VMA_DEBUG_MARGIN is disabled.
    {
        AllocInfo allocInfo[2];

        bufCreateInfo.size = BUF_SIZE;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
        const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
        bufCreateInfo.size = biggerBufSize;
        CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);

        DestroyAllocation(allocInfo[0]);

        VmaDefragmentationStats defragStats;
        Defragment(&allocInfo[1], 1, nullptr, &defragStats);
        // If this fails, it means we couldn't do memmove with overlapping regions.
        TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);

        ValidateAllocationsData(&allocInfo[1], 1);
        DestroyAllocation(allocInfo[1]);
    }

    vmaDestroyPool(g_hAllocator, pool);
}

void TestDefragmentationWholePool()
{
    wprintf(L"Test defragmentation whole pool\n");

    RandomNumberGenerator rand(668);

    const VkDeviceSize BUF_SIZE = 0x10000;
    const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = BUF_SIZE;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo exampleAllocCreateInfo = {};
    exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.blockSize = BLOCK_SIZE;
    poolCreateInfo.memoryTypeIndex = memTypeIndex;

    VmaDefragmentationStats defragStats[2];
    for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
    {
        VmaPool pool;
        ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );

        std::vector<AllocInfo> allocations;

        // Buffers of fixed size.
        // Fill 2 blocks. Remove odd buffers. Defragment all of them.
        for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
        {
            AllocInfo allocInfo;
            CreateBuffer(pool, bufCreateInfo, false, allocInfo);
            allocations.push_back(allocInfo);
        }

        for(size_t i = 1; i < allocations.size(); ++i)
        {
            DestroyAllocation(allocations[i]);
            allocations.erase(allocations.begin() + i);
        }

        VmaDefragmentationInfo2 defragInfo = {};
        defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
        defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
        std::vector<VmaAllocation> allocationsToDefrag;
        if(caseIndex == 0)
        {
            defragInfo.poolCount = 1;
            defragInfo.pPools = &pool;
        }
        else
        {
            const size_t allocCount = allocations.size();
            allocationsToDefrag.resize(allocCount);
            std::transform(
                allocations.begin(), allocations.end(),
                allocationsToDefrag.begin(),
                [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
            defragInfo.allocationCount = (uint32_t)allocCount;
            defragInfo.pAllocations = allocationsToDefrag.data();
        }

        VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
        VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
        TEST(res >= VK_SUCCESS);
        vmaDefragmentationEnd(g_hAllocator, defragCtx);

        TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);

        ValidateAllocationsData(allocations.data(), allocations.size());

        DestroyAllAllocations(allocations);

        vmaDestroyPool(g_hAllocator, pool);
    }

    TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
    TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
    TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
    TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
}

void TestDefragmentationFull()
{
    std::vector<AllocInfo> allocations;

    // Create initial allocations.
    for(size_t i = 0; i < 400; ++i)
    {
        AllocInfo allocation;
        CreateAllocation(allocation);
        allocations.push_back(allocation);
    }

    // Delete random allocations
    const size_t allocationsToDeletePercent = 80;
    size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
    for(size_t i = 0; i < allocationsToDelete; ++i)
    {
        size_t index = (size_t)rand() % allocations.size();
        DestroyAllocation(allocations[index]);
        allocations.erase(allocations.begin() + index);
    }

    for(size_t i = 0; i < allocations.size(); ++i)
        ValidateAllocationData(allocations[i]);

    //SaveAllocatorStatsToFile(L"Before.csv");

    {
        std::vector<VmaAllocation> vmaAllocations(allocations.size());
        for(size_t i = 0; i < allocations.size(); ++i)
            vmaAllocations[i] = allocations[i].m_Allocation;

        const size_t nonMovablePercent = 0;
        size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
        for(size_t i = 0; i < nonMovableCount; ++i)
        {
            size_t index = (size_t)rand() % vmaAllocations.size();
            vmaAllocations.erase(vmaAllocations.begin() + index);
        }

        const uint32_t defragCount = 1;
        for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
        {
            std::vector<VkBool32> allocationsChanged(vmaAllocations.size());

            VmaDefragmentationInfo defragmentationInfo;
            defragmentationInfo.maxAllocationsToMove = UINT_MAX;
            defragmentationInfo.maxBytesToMove = SIZE_MAX;

            wprintf(L"Defragmentation #%u\n", defragIndex);

            time_point begTime = std::chrono::high_resolution_clock::now();

            VmaDefragmentationStats stats;
            VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
            TEST(res >= 0);

            float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);

            wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
            wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
            wprintf(L"Time: %.2f s\n", defragmentDuration);

            for(size_t i = 0; i < vmaAllocations.size(); ++i)
            {
                if(allocationsChanged[i])
                {
                    RecreateAllocationResource(allocations[i]);
                }
            }

            for(size_t i = 0; i < allocations.size(); ++i)
                ValidateAllocationData(allocations[i]);

            //wchar_t fileName[MAX_PATH];
            //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
            //SaveAllocatorStatsToFile(fileName);
        }
    }

    // Destroy all remaining allocations.
    DestroyAllAllocations(allocations);
}

static void TestDefragmentationGpu()
{
    wprintf(L"Test defragmentation GPU\n");
    g_MemoryAliasingWarningEnabled = false;

    std::vector<AllocInfo> allocations;

    // Create enough allocations to reliably fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001612 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1613 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001614 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001615 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1616 const size_t percentToLeave = 30;
1617 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001618 RandomNumberGenerator rand = { 234522 };
1619
1620 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001621
1622 VmaAllocationCreateInfo allocCreateInfo = {};
1623 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001624 allocCreateInfo.flags = 0;
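    // pUserData is used below as a simple tag rather than a pointer: 2 marks a buffer treated as
    // non-movable, 1 marks a movable one. The defragmentation pass later filters on this value.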
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001625
1626 // Create all intended buffers.
1627 for(size_t i = 0; i < bufCount; ++i)
1628 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001629 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1630
1631 if(rand.Generate() % 100 < percentNonMovable)
1632 {
1633 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1634 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1635 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1636 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1637 }
1638 else
1639 {
1640 // Different usage just to see different color in output from VmaDumpVis.
1641 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1642 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1643 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1644 // And in JSON dump.
1645 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1646 }
1647
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001648 AllocInfo alloc;
1649 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1650 alloc.m_StartValue = rand.Generate();
1651 allocations.push_back(alloc);
1652 }
1653
1654 // Destroy some percentage of them.
1655 {
1656 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1657 for(size_t i = 0; i < buffersToDestroy; ++i)
1658 {
1659 const size_t index = rand.Generate() % allocations.size();
1660 allocations[index].Destroy();
1661 allocations.erase(allocations.begin() + index);
1662 }
1663 }
1664
1665 // Fill them with meaningful data.
1666 UploadGpuData(allocations.data(), allocations.size());
1667
Adam Sawickic6ede152018-11-16 17:04:14 +01001668 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001669 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001670 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001671
1672 // Defragment using GPU only.
1673 {
1674 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001675
Adam Sawickic6ede152018-11-16 17:04:14 +01001676 std::vector<VmaAllocation> allocationPtrs;
1677 std::vector<VkBool32> allocationChanged;
1678 std::vector<size_t> allocationOriginalIndex;
1679
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001680 for(size_t i = 0; i < allocCount; ++i)
1681 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001682 VmaAllocationInfo allocInfo = {};
1683 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1684 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1685 {
1686 allocationPtrs.push_back(allocations[i].m_Allocation);
1687 allocationChanged.push_back(VK_FALSE);
1688 allocationOriginalIndex.push_back(i);
1689 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001690 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001691
1692 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001693
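        // GPU defragmentation flow: vmaDefragmentationBegin() records the required memory-move
        // commands into defragInfo.commandBuffer, so it is wrapped in BeginSingleTimeCommands() /
        // EndSingleTimeCommands() (assumed to submit the temporary command buffer and wait for it),
        // and vmaDefragmentationEnd() is called only after that work has completed.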
1694 BeginSingleTimeCommands();
1695
1696 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001697 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001698 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001699 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001700 defragInfo.pAllocationsChanged = allocationChanged.data();
1701 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001702 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1703 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1704
1705 VmaDefragmentationStats stats = {};
1706 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1707 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1708 TEST(res >= VK_SUCCESS);
1709
1710 EndSingleTimeCommands();
1711
1712 vmaDefragmentationEnd(g_hAllocator, ctx);
1713
Adam Sawickic6ede152018-11-16 17:04:14 +01001714 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001715 {
1716 if(allocationChanged[i])
1717 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001718 const size_t origAllocIndex = allocationOriginalIndex[i];
1719 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001720 }
1721 }
1722
Adam Sawicki4d844e22019-01-24 16:21:05 +01001723 // If corruption detection is enabled, GPU defragmentation may not work on
1724 // memory types that have this detection active, e.g. on Intel.
1725 if(VMA_DEBUG_DETECT_CORRUPTION == 0)
1726 {
1727 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1728 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
1729 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001730 }
1731
1732 ValidateGpuData(allocations.data(), allocations.size());
1733
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001734 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001735 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001736
1737 // Destroy all remaining buffers.
1738 for(size_t i = allocations.size(); i--; )
1739 {
1740 allocations[i].Destroy();
1741 }
Adam Sawicki05704002018-11-08 16:07:29 +01001742
1743 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001744}
1745
Adam Sawickib8333fb2018-03-13 16:15:53 +01001746static void TestUserData()
1747{
1748 VkResult res;
1749
1750 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1751 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1752 bufCreateInfo.size = 0x10000;
1753
1754 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1755 {
1756 // Opaque pointer
1757 {
1758
1759 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1760 void* pointerToSomething = &res;
1761
1762 VmaAllocationCreateInfo allocCreateInfo = {};
1763 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1764 allocCreateInfo.pUserData = numberAsPointer;
1765 if(testIndex == 1)
1766 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1767
1768 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1769 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001770 TEST(res == VK_SUCCESS);
1771            TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001772
1773 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001774 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001775
1776 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1777 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001778 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001779
1780 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1781 }
1782
1783 // String
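        // With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT, pUserData is treated as a
        // null-terminated string that the allocator copies internally, so the caller's buffer can
        // be freed right after creation (verified below by deleting name1Buf).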
1784 {
1785 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1786 const char* name2 = "2";
1787 const size_t name1Len = strlen(name1);
1788
1789 char* name1Buf = new char[name1Len + 1];
1790 strcpy_s(name1Buf, name1Len + 1, name1);
1791
1792 VmaAllocationCreateInfo allocCreateInfo = {};
1793 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1794 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1795 allocCreateInfo.pUserData = name1Buf;
1796 if(testIndex == 1)
1797 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1798
1799 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1800 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001801 TEST(res == VK_SUCCESS);
1802 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1803 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001804
1805 delete[] name1Buf;
1806
1807 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001808 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001809
1810 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1811 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001812 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001813
1814 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1815 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001816 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001817
1818 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1819 }
1820 }
1821}
1822
Adam Sawicki370ab182018-11-08 16:31:00 +01001823static void TestInvalidAllocations()
1824{
1825 VkResult res;
1826
1827 VmaAllocationCreateInfo allocCreateInfo = {};
1828 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1829
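    // Each case below passes deliberately invalid parameters and expects VMA to report
    // VK_ERROR_VALIDATION_FAILED_EXT without creating any buffer, image, or allocation.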
1830 // Try to allocate 0 bytes.
1831 {
1832 VkMemoryRequirements memReq = {};
1833 memReq.size = 0; // !!!
1834 memReq.alignment = 4;
1835 memReq.memoryTypeBits = UINT32_MAX;
1836 VmaAllocation alloc = VK_NULL_HANDLE;
1837 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1838 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1839 }
1840
1841 // Try to create buffer with size = 0.
1842 {
1843 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1844 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1845 bufCreateInfo.size = 0; // !!!
1846 VkBuffer buf = VK_NULL_HANDLE;
1847 VmaAllocation alloc = VK_NULL_HANDLE;
1848 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1849 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1850 }
1851
1852 // Try to create image with one dimension = 0.
1853 {
1854        VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1855 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1856 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1857 imageCreateInfo.extent.width = 128;
1858 imageCreateInfo.extent.height = 0; // !!!
1859 imageCreateInfo.extent.depth = 1;
1860 imageCreateInfo.mipLevels = 1;
1861 imageCreateInfo.arrayLayers = 1;
1862 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1863 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1864 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1865 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1866 VkImage image = VK_NULL_HANDLE;
1867 VmaAllocation alloc = VK_NULL_HANDLE;
1868 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1869 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1870 }
1871}
1872
Adam Sawickib8333fb2018-03-13 16:15:53 +01001873static void TestMemoryRequirements()
1874{
1875 VkResult res;
1876 VkBuffer buf;
1877 VmaAllocation alloc;
1878 VmaAllocationInfo allocInfo;
1879
1880 const VkPhysicalDeviceMemoryProperties* memProps;
1881 vmaGetMemoryProperties(g_hAllocator, &memProps);
1882
1883 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1884 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1885 bufInfo.size = 128;
1886
1887 VmaAllocationCreateInfo allocCreateInfo = {};
1888
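    // The cases below exercise the three ways VmaAllocationCreateInfo can steer memory type
    // selection: usage (VmaMemoryUsage), requiredFlags/preferredFlags, and an explicit
    // memoryTypeBits mask.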
1889 // No requirements.
1890 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001891 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001892 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1893
1894 // Usage.
1895 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1896 allocCreateInfo.requiredFlags = 0;
1897 allocCreateInfo.preferredFlags = 0;
1898 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1899
1900 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001901 TEST(res == VK_SUCCESS);
1902 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001903 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1904
1905 // Required flags, preferred flags.
1906 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1907 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1908 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1909 allocCreateInfo.memoryTypeBits = 0;
1910
1911 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001912 TEST(res == VK_SUCCESS);
1913 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1914 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001915 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1916
1917 // memoryTypeBits.
1918 const uint32_t memType = allocInfo.memoryType;
1919 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1920 allocCreateInfo.requiredFlags = 0;
1921 allocCreateInfo.preferredFlags = 0;
1922 allocCreateInfo.memoryTypeBits = 1u << memType;
1923
1924 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001925 TEST(res == VK_SUCCESS);
1926 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001927 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1928
1929}
1930
1931static void TestBasics()
1932{
1933 VkResult res;
1934
1935 TestMemoryRequirements();
1936
1937 // Lost allocation
1938 {
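        // vmaCreateLostAllocation() returns an allocation that behaves as if it were already lost:
        // it is backed by no device memory and reports zero size, as verified below.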
1939 VmaAllocation alloc = VK_NULL_HANDLE;
1940 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001941 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001942
1943 VmaAllocationInfo allocInfo;
1944 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001945 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1946 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001947
1948 vmaFreeMemory(g_hAllocator, alloc);
1949 }
1950
1951 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
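    // If the chosen memory type turns out not to be HOST_VISIBLE, VMA documents that
    // VMA_ALLOCATION_CREATE_MAPPED_BIT is simply ignored, so creation is still expected to succeed.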
1952 {
1953 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1954 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1955 bufCreateInfo.size = 128;
1956
1957 VmaAllocationCreateInfo allocCreateInfo = {};
1958 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1959 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1960
1961 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1962 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001963 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001964
1965 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1966
1967        // Same with DEDICATED_MEMORY (formerly called OWN_MEMORY).
1968 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1969
1970 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001971 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001972
1973 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1974 }
1975
1976 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001977
1978 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001979}
1980
1981void TestHeapSizeLimit()
1982{
1983 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1984 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1985
1986 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1987 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1988 {
1989 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1990 }
1991
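    // pHeapSizeLimit makes the allocator behave as if every memory heap had only HEAP_SIZE_LIMIT
    // bytes available, which lets this test hit "out of device memory" deterministically without
    // exhausting the real heap.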
1992 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1993 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1994 allocatorCreateInfo.device = g_hDevice;
1995 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1996
1997 VmaAllocator hAllocator;
1998 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001999 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002000
2001 struct Item
2002 {
2003 VkBuffer hBuf;
2004 VmaAllocation hAlloc;
2005 };
2006 std::vector<Item> items;
2007
2008 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2009 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2010
2011    // 1. Allocate two buffers of dedicated ("own") memory, each half the size of BLOCK_SIZE.
2012 VmaAllocationInfo ownAllocInfo;
2013 {
2014 VmaAllocationCreateInfo allocCreateInfo = {};
2015 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2016 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2017
2018 bufCreateInfo.size = BLOCK_SIZE / 2;
2019
2020 for(size_t i = 0; i < 2; ++i)
2021 {
2022 Item item;
2023 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002024 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002025 items.push_back(item);
2026 }
2027 }
2028
2029    // Create a pool to force the following allocations to come from this same memory type.
2030 VmaPoolCreateInfo poolCreateInfo = {};
2031 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2032 poolCreateInfo.blockSize = BLOCK_SIZE;
2033
2034 VmaPool hPool;
2035 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002036 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002037
2038 // 2. Allocate normal buffers from all the remaining memory.
2039 {
2040 VmaAllocationCreateInfo allocCreateInfo = {};
2041 allocCreateInfo.pool = hPool;
2042
2043 bufCreateInfo.size = BLOCK_SIZE / 2;
2044
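        // With the 1 GB limit, 128 MB blocks, and 2 x 64 MB already taken by the dedicated
        // allocations above, (HEAP_SIZE_LIMIT / BLOCK_SIZE - 1) * 2 = 14 buffers of 64 MB exactly
        // fill the remaining 7 blocks.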
2045 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2046 for(size_t i = 0; i < bufCount; ++i)
2047 {
2048 Item item;
2049 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002050 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002051 items.push_back(item);
2052 }
2053 }
2054
2055 // 3. Allocation of one more (even small) buffer should fail.
2056 {
2057 VmaAllocationCreateInfo allocCreateInfo = {};
2058 allocCreateInfo.pool = hPool;
2059
2060 bufCreateInfo.size = 128;
2061
2062 VkBuffer hBuf;
2063 VmaAllocation hAlloc;
2064 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002065 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002066 }
2067
2068 // Destroy everything.
2069 for(size_t i = items.size(); i--; )
2070 {
2071 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2072 }
2073
2074 vmaDestroyPool(hAllocator, hPool);
2075
2076 vmaDestroyAllocator(hAllocator);
2077}
2078
Adam Sawicki212a4a62018-06-14 15:44:45 +02002079#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002080static void TestDebugMargin()
2081{
2082 if(VMA_DEBUG_MARGIN == 0)
2083 {
2084 return;
2085 }
2086
2087 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002088 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002089
2090 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002091 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002092
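    // With VMA_DEBUG_MARGIN > 0, the allocator reserves VMA_DEBUG_MARGIN bytes of free space
    // before and after every allocation; the offset checks below verify that this margin is
    // respected both at the start of a block and between neighboring allocations.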
2093    // Create a few buffers of different sizes.
2094 const size_t BUF_COUNT = 10;
2095 BufferInfo buffers[BUF_COUNT];
2096 VmaAllocationInfo allocInfo[BUF_COUNT];
2097    for(size_t i = 0; i < BUF_COUNT; ++i)
2098 {
2099 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002100 // Last one will be mapped.
2101 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002102
2103 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002104 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002105 // Margin is preserved also at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002106 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002107
2108 if(i == BUF_COUNT - 1)
2109 {
2110 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002111 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002112 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2113 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2114 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002115 }
2116
2117 // Check if their offsets preserve margin between them.
2118 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2119 {
2120 if(lhs.deviceMemory != rhs.deviceMemory)
2121 {
2122 return lhs.deviceMemory < rhs.deviceMemory;
2123 }
2124 return lhs.offset < rhs.offset;
2125 });
2126 for(size_t i = 1; i < BUF_COUNT; ++i)
2127 {
2128 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2129 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002130 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002131 }
2132 }
2133
Adam Sawicki212a4a62018-06-14 15:44:45 +02002134 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002135 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002136
Adam Sawicki73b16652018-06-11 16:39:25 +02002137 // Destroy all buffers.
2138 for(size_t i = BUF_COUNT; i--; )
2139 {
2140 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2141 }
2142}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002143#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002144
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002145static void TestLinearAllocator()
2146{
2147 wprintf(L"Test linear allocator\n");
2148
2149 RandomNumberGenerator rand{645332};
2150
2151 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2152 sampleBufCreateInfo.size = 1024; // Whatever.
2153 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2154
2155 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2156 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2157
2158 VmaPoolCreateInfo poolCreateInfo = {};
2159 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002160 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002161
Adam Sawickiee082772018-06-20 17:45:49 +02002162 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002163 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2164 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
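    // With minBlockCount == maxBlockCount == 1, this linear pool consists of a single fixed block,
    // which lets the sub-tests below exercise one-time free, stack (LIFO), ring-buffer,
    // double-stack (UPPER_ADDRESS_BIT), and lost-allocation behavior in isolation.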
2165
2166 VmaPool pool = nullptr;
2167 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002168 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002169
2170 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2171
2172 VmaAllocationCreateInfo allocCreateInfo = {};
2173 allocCreateInfo.pool = pool;
2174
2175 constexpr size_t maxBufCount = 100;
2176 std::vector<BufferInfo> bufInfo;
2177
2178 constexpr VkDeviceSize bufSizeMin = 16;
2179 constexpr VkDeviceSize bufSizeMax = 1024;
2180 VmaAllocationInfo allocInfo;
2181 VkDeviceSize prevOffset = 0;
2182
2183 // Test one-time free.
2184 for(size_t i = 0; i < 2; ++i)
2185 {
2186        // Allocate a number of buffers of varying size that surely fit into this block.
2187 VkDeviceSize bufSumSize = 0;
2188 for(size_t i = 0; i < maxBufCount; ++i)
2189 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002190 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002191 BufferInfo newBufInfo;
2192 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2193 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002194 TEST(res == VK_SUCCESS);
2195 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002196 bufInfo.push_back(newBufInfo);
2197 prevOffset = allocInfo.offset;
2198 bufSumSize += bufCreateInfo.size;
2199 }
2200
2201 // Validate pool stats.
2202 VmaPoolStats stats;
2203 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002204 TEST(stats.size == poolCreateInfo.blockSize);
2205        TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2206 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002207
2208 // Destroy the buffers in random order.
2209 while(!bufInfo.empty())
2210 {
2211 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2212 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2213 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2214 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2215 }
2216 }
2217
2218 // Test stack.
2219 {
2220        // Allocate a number of buffers of varying size that surely fit into this block.
2221 for(size_t i = 0; i < maxBufCount; ++i)
2222 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002223 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002224 BufferInfo newBufInfo;
2225 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2226 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002227 TEST(res == VK_SUCCESS);
2228 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002229 bufInfo.push_back(newBufInfo);
2230 prevOffset = allocInfo.offset;
2231 }
2232
2233        // Destroy a few buffers from the top of the stack.
2234 for(size_t i = 0; i < maxBufCount / 5; ++i)
2235 {
2236 const BufferInfo& currBufInfo = bufInfo.back();
2237 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2238 bufInfo.pop_back();
2239 }
2240
2241 // Create some more
2242 for(size_t i = 0; i < maxBufCount / 5; ++i)
2243 {
Adam Sawickifd366b62019-01-24 15:26:43 +01002244 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002245 BufferInfo newBufInfo;
2246 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2247 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002248 TEST(res == VK_SUCCESS);
2249 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002250 bufInfo.push_back(newBufInfo);
2251 prevOffset = allocInfo.offset;
2252 }
2253
2254 // Destroy the buffers in reverse order.
2255 while(!bufInfo.empty())
2256 {
2257 const BufferInfo& currBufInfo = bufInfo.back();
2258 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2259 bufInfo.pop_back();
2260 }
2261 }
2262
Adam Sawickiee082772018-06-20 17:45:49 +02002263 // Test ring buffer.
2264 {
2265        // Allocate a number of buffers that surely fit into this block.
2266 bufCreateInfo.size = bufSizeMax;
2267 for(size_t i = 0; i < maxBufCount; ++i)
2268 {
2269 BufferInfo newBufInfo;
2270 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2271 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002272 TEST(res == VK_SUCCESS);
2273 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002274 bufInfo.push_back(newBufInfo);
2275 prevOffset = allocInfo.offset;
2276 }
2277
2278        // Free and allocate new buffers enough times to make sure we wrap around at least once.
2279 const size_t buffersPerIter = maxBufCount / 10 - 1;
2280 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2281 for(size_t iter = 0; iter < iterCount; ++iter)
2282 {
2283 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2284 {
2285 const BufferInfo& currBufInfo = bufInfo.front();
2286 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2287 bufInfo.erase(bufInfo.begin());
2288 }
2289 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2290 {
2291 BufferInfo newBufInfo;
2292 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2293 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002294 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002295 bufInfo.push_back(newBufInfo);
2296 }
2297 }
2298
2299 // Allocate buffers until we reach out-of-memory.
2300 uint32_t debugIndex = 0;
2301 while(res == VK_SUCCESS)
2302 {
2303 BufferInfo newBufInfo;
2304 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2305 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2306 if(res == VK_SUCCESS)
2307 {
2308 bufInfo.push_back(newBufInfo);
2309 }
2310 else
2311 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002312 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002313 }
2314 ++debugIndex;
2315 }
2316
2317 // Destroy the buffers in random order.
2318 while(!bufInfo.empty())
2319 {
2320 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2321 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2322 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2323 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2324 }
2325 }
2326
Adam Sawicki680b2252018-08-22 14:47:32 +02002327 // Test double stack.
2328 {
2329        // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
2330 VkDeviceSize prevOffsetLower = 0;
2331 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
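        // VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT places an allocation at the upper end of the
        // linear block, growing downwards, while regular allocations grow upwards from offset 0;
        // the prevOffsetLower/prevOffsetUpper checks below verify that the two sides never cross.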
2332 for(size_t i = 0; i < maxBufCount; ++i)
2333 {
2334 const bool upperAddress = (i % 2) != 0;
2335 if(upperAddress)
2336 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2337 else
2338 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002339 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002340 BufferInfo newBufInfo;
2341 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2342 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002343 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002344 if(upperAddress)
2345 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002346 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002347 prevOffsetUpper = allocInfo.offset;
2348 }
2349 else
2350 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002351 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002352 prevOffsetLower = allocInfo.offset;
2353 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002354 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002355 bufInfo.push_back(newBufInfo);
2356 }
2357
2358        // Destroy a few buffers from the top of the stack.
2359 for(size_t i = 0; i < maxBufCount / 5; ++i)
2360 {
2361 const BufferInfo& currBufInfo = bufInfo.back();
2362 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2363 bufInfo.pop_back();
2364 }
2365
2366 // Create some more
2367 for(size_t i = 0; i < maxBufCount / 5; ++i)
2368 {
2369 const bool upperAddress = (i % 2) != 0;
2370 if(upperAddress)
2371 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2372 else
2373 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002374 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002375 BufferInfo newBufInfo;
2376 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2377 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002378 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002379 bufInfo.push_back(newBufInfo);
2380 }
2381
2382 // Destroy the buffers in reverse order.
2383 while(!bufInfo.empty())
2384 {
2385 const BufferInfo& currBufInfo = bufInfo.back();
2386 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2387 bufInfo.pop_back();
2388 }
2389
2390 // Create buffers on both sides until we reach out of memory.
2391 prevOffsetLower = 0;
2392 prevOffsetUpper = poolCreateInfo.blockSize;
2393 res = VK_SUCCESS;
2394 for(size_t i = 0; res == VK_SUCCESS; ++i)
2395 {
2396 const bool upperAddress = (i % 2) != 0;
2397 if(upperAddress)
2398 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2399 else
2400 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01002401 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02002402 BufferInfo newBufInfo;
2403 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2404 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2405 if(res == VK_SUCCESS)
2406 {
2407 if(upperAddress)
2408 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002409 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002410 prevOffsetUpper = allocInfo.offset;
2411 }
2412 else
2413 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002414 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002415 prevOffsetLower = allocInfo.offset;
2416 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002417 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002418 bufInfo.push_back(newBufInfo);
2419 }
2420 }
2421
2422 // Destroy the buffers in random order.
2423 while(!bufInfo.empty())
2424 {
2425 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2426 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2427 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2428 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2429 }
2430
2431 // Create buffers on upper side only, constant size, until we reach out of memory.
2432 prevOffsetUpper = poolCreateInfo.blockSize;
2433 res = VK_SUCCESS;
2434 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2435 bufCreateInfo.size = bufSizeMax;
2436 for(size_t i = 0; res == VK_SUCCESS; ++i)
2437 {
2438 BufferInfo newBufInfo;
2439 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2440 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2441 if(res == VK_SUCCESS)
2442 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002443 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002444 prevOffsetUpper = allocInfo.offset;
2445 bufInfo.push_back(newBufInfo);
2446 }
2447 }
2448
2449 // Destroy the buffers in reverse order.
2450 while(!bufInfo.empty())
2451 {
2452 const BufferInfo& currBufInfo = bufInfo.back();
2453 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2454 bufInfo.pop_back();
2455 }
2456 }
2457
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002458 // Test ring buffer with lost allocations.
2459 {
2460        // Allocate buffers until the pool is full.
2461 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
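        // With CAN_BECOME_LOST, an allocation that has not been used for enough frames may be
        // taken over by a later allocation created with CAN_MAKE_OTHER_LOST; a lost allocation
        // then reports deviceMemory == VK_NULL_HANDLE, which is what the checks below rely on.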
2462 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2463 res = VK_SUCCESS;
2464 for(size_t i = 0; res == VK_SUCCESS; ++i)
2465 {
2466 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2467
Adam Sawickifd366b62019-01-24 15:26:43 +01002468 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002469
2470 BufferInfo newBufInfo;
2471 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2472 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2473 if(res == VK_SUCCESS)
2474 bufInfo.push_back(newBufInfo);
2475 }
2476
2477 // Free first half of it.
2478 {
2479 const size_t buffersToDelete = bufInfo.size() / 2;
2480 for(size_t i = 0; i < buffersToDelete; ++i)
2481 {
2482 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2483 }
2484 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2485 }
2486
2487        // Allocate buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002488        // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002489 res = VK_SUCCESS;
2490 for(size_t i = 0; res == VK_SUCCESS; ++i)
2491 {
2492 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2493
Adam Sawickifd366b62019-01-24 15:26:43 +01002494 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002495
2496 BufferInfo newBufInfo;
2497 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2498 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2499 if(res == VK_SUCCESS)
2500 bufInfo.push_back(newBufInfo);
2501 }
2502
2503 VkDeviceSize firstNewOffset;
2504 {
2505 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2506
2507 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2508 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2509 bufCreateInfo.size = bufSizeMax;
2510
2511 BufferInfo newBufInfo;
2512 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2513 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002515 bufInfo.push_back(newBufInfo);
2516 firstNewOffset = allocInfo.offset;
2517
2518 // Make sure at least one buffer from the beginning became lost.
2519 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002520 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002521 }
2522
Adam Sawickifd366b62019-01-24 15:26:43 +01002523#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002524 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
2525 size_t newCount = 1;
2526 for(;;)
2527 {
2528 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2529
Adam Sawickifd366b62019-01-24 15:26:43 +01002530 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002531
2532 BufferInfo newBufInfo;
2533 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2534 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01002535
Adam Sawickib8d34d52018-10-03 17:41:20 +02002536 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002537 bufInfo.push_back(newBufInfo);
2538 ++newCount;
2539 if(allocInfo.offset < firstNewOffset)
2540 break;
2541 }
Adam Sawickifd366b62019-01-24 15:26:43 +01002542#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002543
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002544 // Delete buffers that are lost.
2545 for(size_t i = bufInfo.size(); i--; )
2546 {
2547 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2548 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2549 {
2550 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2551 bufInfo.erase(bufInfo.begin() + i);
2552 }
2553 }
2554
2555 // Test vmaMakePoolAllocationsLost
2556 {
2557 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2558
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01002559 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002560 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002561 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002562
2563 size_t realLostAllocCount = 0;
2564 for(size_t i = 0; i < bufInfo.size(); ++i)
2565 {
2566 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2567 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2568 ++realLostAllocCount;
2569 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002570 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002571 }
2572
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002573 // Destroy all the buffers in forward order.
2574 for(size_t i = 0; i < bufInfo.size(); ++i)
2575 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2576 bufInfo.clear();
2577 }
2578
Adam Sawicki70a683e2018-08-24 15:36:32 +02002579 vmaDestroyPool(g_hAllocator, pool);
2580}
Adam Sawickif799c4f2018-08-23 10:40:30 +02002581
Adam Sawicki70a683e2018-08-24 15:36:32 +02002582static void TestLinearAllocatorMultiBlock()
2583{
2584 wprintf(L"Test linear allocator multi block\n");
2585
2586 RandomNumberGenerator rand{345673};
2587
2588 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2589 sampleBufCreateInfo.size = 1024 * 1024;
2590 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2591
2592 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2593 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2594
2595 VmaPoolCreateInfo poolCreateInfo = {};
2596 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2597 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002598 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002599
2600 VmaPool pool = nullptr;
2601 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002602 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002603
2604 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2605
2606 VmaAllocationCreateInfo allocCreateInfo = {};
2607 allocCreateInfo.pool = pool;
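    // Unlike TestLinearAllocator, this pool does not pin minBlockCount/maxBlockCount to 1, so the
    // linear pool is allowed to grow to a second block and shrink back; the VmaPoolStats
    // blockCount checks below verify exactly that.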
2608
2609 std::vector<BufferInfo> bufInfo;
2610 VmaAllocationInfo allocInfo;
2611
2612 // Test one-time free.
2613 {
2614 // Allocate buffers until we move to a second block.
2615 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2616 for(uint32_t i = 0; ; ++i)
2617 {
2618 BufferInfo newBufInfo;
2619 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2620 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002621 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002622 bufInfo.push_back(newBufInfo);
2623 if(lastMem && allocInfo.deviceMemory != lastMem)
2624 {
2625 break;
2626 }
2627 lastMem = allocInfo.deviceMemory;
2628 }
2629
Adam Sawickib8d34d52018-10-03 17:41:20 +02002630 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002631
2632        // Make sure that the pool now has two blocks.
2633 VmaPoolStats poolStats = {};
2634 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002635 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002636
2637 // Destroy all the buffers in random order.
2638 while(!bufInfo.empty())
2639 {
2640 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2641 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2642 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2643 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2644 }
2645
2646        // Make sure that the pool now has at most one block.
2647 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002648 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002649 }
2650
2651 // Test stack.
2652 {
2653 // Allocate buffers until we move to a second block.
2654 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2655 for(uint32_t i = 0; ; ++i)
2656 {
2657 BufferInfo newBufInfo;
2658 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2659 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002660 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002661 bufInfo.push_back(newBufInfo);
2662 if(lastMem && allocInfo.deviceMemory != lastMem)
2663 {
2664 break;
2665 }
2666 lastMem = allocInfo.deviceMemory;
2667 }
2668
Adam Sawickib8d34d52018-10-03 17:41:20 +02002669 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002670
2671        // Add a few more buffers.
2672 for(uint32_t i = 0; i < 5; ++i)
2673 {
2674 BufferInfo newBufInfo;
2675 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2676 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002677 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002678 bufInfo.push_back(newBufInfo);
2679 }
2680
2681        // Make sure that the pool now has two blocks.
2682 VmaPoolStats poolStats = {};
2683 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002684 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002685
2686 // Delete half of buffers, LIFO.
2687 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2688 {
2689 const BufferInfo& currBufInfo = bufInfo.back();
2690 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2691 bufInfo.pop_back();
2692 }
2693
2694 // Add one more buffer.
2695 BufferInfo newBufInfo;
2696 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2697 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002698 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002699 bufInfo.push_back(newBufInfo);
2700
2701        // Make sure that the pool now has one block.
2702 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002703 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002704
2705 // Delete all the remaining buffers, LIFO.
2706 while(!bufInfo.empty())
2707 {
2708 const BufferInfo& currBufInfo = bufInfo.back();
2709 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2710 bufInfo.pop_back();
2711 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002712 }
2713
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002714 vmaDestroyPool(g_hAllocator, pool);
2715}
2716
Adam Sawickifd11d752018-08-22 15:02:10 +02002717static void ManuallyTestLinearAllocator()
2718{
2719 VmaStats origStats;
2720 vmaCalculateStats(g_hAllocator, &origStats);
2721
2722 wprintf(L"Manually test linear allocator\n");
2723
2724 RandomNumberGenerator rand{645332};
2725
2726 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2727 sampleBufCreateInfo.size = 1024; // Whatever.
2728 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2729
2730 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2731 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2732
2733 VmaPoolCreateInfo poolCreateInfo = {};
2734 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002735 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002736
2737 poolCreateInfo.blockSize = 10 * 1024;
2738 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2739 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2740
2741 VmaPool pool = nullptr;
2742 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002743 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002744
2745 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2746
2747 VmaAllocationCreateInfo allocCreateInfo = {};
2748 allocCreateInfo.pool = pool;
2749
2750 std::vector<BufferInfo> bufInfo;
2751 VmaAllocationInfo allocInfo;
2752 BufferInfo newBufInfo;
2753
2754 // Test double stack.
2755 {
2756 /*
2757 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2758 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2759
2760 Totally:
2761 1 block allocated
2762 10240 Vulkan bytes
2763 6 new allocations
2764 2256 bytes in allocations
2765 */
2766
2767 bufCreateInfo.size = 32;
2768 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2769 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002770 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002771 bufInfo.push_back(newBufInfo);
2772
2773 bufCreateInfo.size = 1024;
2774 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2775 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002776 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002777 bufInfo.push_back(newBufInfo);
2778
2779 bufCreateInfo.size = 32;
2780 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2781 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002782 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002783 bufInfo.push_back(newBufInfo);
2784
2785 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2786
2787 bufCreateInfo.size = 128;
2788 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2789 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002790 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002791 bufInfo.push_back(newBufInfo);
2792
2793 bufCreateInfo.size = 1024;
2794 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2795 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002796 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002797 bufInfo.push_back(newBufInfo);
2798
2799 bufCreateInfo.size = 16;
2800 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2801 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002802 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002803 bufInfo.push_back(newBufInfo);
2804
2805 VmaStats currStats;
2806 vmaCalculateStats(g_hAllocator, &currStats);
2807 VmaPoolStats poolStats;
2808 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2809
2810 char* statsStr = nullptr;
2811 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2812
2813 // PUT BREAKPOINT HERE TO CHECK.
2814 // Inspect: currStats versus origStats, poolStats, statsStr.
2815 int I = 0;
2816
2817 vmaFreeStatsString(g_hAllocator, statsStr);
2818
2819 // Destroy the buffers in reverse order.
2820 while(!bufInfo.empty())
2821 {
2822 const BufferInfo& currBufInfo = bufInfo.back();
2823 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2824 bufInfo.pop_back();
2825 }
2826 }
2827
2828 vmaDestroyPool(g_hAllocator, pool);
2829}
2830
Adam Sawicki80927152018-09-07 17:27:23 +02002831static void BenchmarkAlgorithmsCase(FILE* file,
2832 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002833 bool empty,
2834 VmaAllocationCreateFlags allocStrategy,
2835 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002836{
2837 RandomNumberGenerator rand{16223};
2838
2839 const VkDeviceSize bufSizeMin = 32;
2840 const VkDeviceSize bufSizeMax = 1024;
2841 const size_t maxBufCapacity = 10000;
2842 const uint32_t iterationCount = 10;
2843
2844 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2845 sampleBufCreateInfo.size = bufSizeMax;
2846 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2847
2848 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2849 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2850
2851 VmaPoolCreateInfo poolCreateInfo = {};
2852 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002853 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002854
2855 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002856 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002857 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2858
2859 VmaPool pool = nullptr;
2860 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002861 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002862
2863 // Buffer created just to get memory requirements. Never bound to any memory.
2864 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2865 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002866 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002867
2868 VkMemoryRequirements memReq = {};
2869 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2870
2871 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2872
2873 VmaAllocationCreateInfo allocCreateInfo = {};
2874 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002875 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002876
2877 VmaAllocation alloc;
2878 std::vector<VmaAllocation> baseAllocations;
2879
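    // Benchmark setup: unless 'empty' is requested, pre-fill the pool to about 1/3 of its size and
    // then free half of those allocations at random to create fragmentation. Each iteration then
    // times allocCount allocations followed by frees in the requested order.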
2880 if(!empty)
2881 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002882 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002883 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002884 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002885 {
Adam Sawicki4d844e22019-01-24 16:21:05 +01002886 // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
2887 // This is theoretically allowed and already uncovered one bug.
Adam Sawicki0a607132018-08-24 11:18:41 +02002888 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2889 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002890 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002891 baseAllocations.push_back(alloc);
2892 totalSize += memReq.size;
2893 }
2894
2895 // Delete half of them, choose randomly.
2896 size_t allocsToDelete = baseAllocations.size() / 2;
2897 for(size_t i = 0; i < allocsToDelete; ++i)
2898 {
2899 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2900 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2901 baseAllocations.erase(baseAllocations.begin() + index);
2902 }
2903 }
2904
2905 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002906 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002907 std::vector<VmaAllocation> testAllocations;
2908 testAllocations.reserve(allocCount);
2909 duration allocTotalDuration = duration::zero();
2910 duration freeTotalDuration = duration::zero();
2911 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2912 {
2913 // Allocations
2914 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2915 for(size_t i = 0; i < allocCount; ++i)
2916 {
2917 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2918 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002919 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002920 testAllocations.push_back(alloc);
2921 }
2922 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2923
2924 // Deallocations
2925 switch(freeOrder)
2926 {
2927 case FREE_ORDER::FORWARD:
2928 // Leave testAllocations unchanged.
2929 break;
2930 case FREE_ORDER::BACKWARD:
2931 std::reverse(testAllocations.begin(), testAllocations.end());
2932 break;
2933 case FREE_ORDER::RANDOM:
2934 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2935 break;
2936 default: assert(0);
2937 }
2938
2939 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2940 for(size_t i = 0; i < allocCount; ++i)
2941 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2942 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2943
2944 testAllocations.clear();
2945 }
2946
2947 // Delete baseAllocations
2948 while(!baseAllocations.empty())
2949 {
2950 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2951 baseAllocations.pop_back();
2952 }
2953
2954 vmaDestroyPool(g_hAllocator, pool);
2955
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002956 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2957 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2958
Adam Sawicki80927152018-09-07 17:27:23 +02002959 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2960 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002961 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002962 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002963 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002964 allocTotalSeconds,
2965 freeTotalSeconds);
2966
2967 if(file)
2968 {
2969 std::string currTime;
2970 CurrentTimeToStr(currTime);
2971
Adam Sawicki80927152018-09-07 17:27:23 +02002972 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002973 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002974 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002975 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002976 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002977 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2978 allocTotalSeconds,
2979 freeTotalSeconds);
2980 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002981}
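// [Editor's note, hedged sketch] BenchmarkAlgorithmsCase above accumulates wall-clock
// time around the vmaAllocateMemory and vmaFreeMemory loops. The same timing pattern,
// factored into a reusable helper; the name MeasureDuration is hypothetical and not
// part of this test suite, and the `duration`/`time_point` aliases are the ones already
// used throughout this file.
template<typename Func>
static duration MeasureDuration(Func&& func)
{
    const time_point beg = std::chrono::high_resolution_clock::now();
    func();
    return std::chrono::high_resolution_clock::now() - beg;
}
// Possible usage inside a benchmark iteration:
//     allocTotalDuration += MeasureDuration([&]{ /* allocation loop */ });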
2982
Adam Sawicki80927152018-09-07 17:27:23 +02002983static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002984{
Adam Sawicki80927152018-09-07 17:27:23 +02002985 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02002986
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002987 if(file)
2988 {
2989 fprintf(file,
2990 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002991 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002992 "Allocation time (s),Deallocation time (s)\n");
2993 }
2994
Adam Sawicki0a607132018-08-24 11:18:41 +02002995 uint32_t freeOrderCount = 1;
2996 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2997 freeOrderCount = 3;
2998 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2999 freeOrderCount = 2;
3000
3001 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003002 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003003
3004 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3005 {
3006 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3007 switch(freeOrderIndex)
3008 {
3009 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3010 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3011 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3012 default: assert(0);
3013 }
3014
3015 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3016 {
Adam Sawicki80927152018-09-07 17:27:23 +02003017 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003018 {
Adam Sawicki80927152018-09-07 17:27:23 +02003019 uint32_t algorithm = 0;
3020 switch(algorithmIndex)
3021 {
3022 case 0:
3023 break;
3024 case 1:
3025 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3026 break;
3027 case 2:
3028 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3029 break;
3030 default:
3031 assert(0);
3032 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003033
Adam Sawicki80927152018-09-07 17:27:23 +02003034 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003035 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3036 {
3037                    VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003038 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003039 {
3040 switch(allocStrategyIndex)
3041 {
3042 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3043 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3044 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3045 default: assert(0);
3046 }
3047 }
3048
Adam Sawicki80927152018-09-07 17:27:23 +02003049 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003050 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003051 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003052 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003053 strategy,
3054 freeOrder); // freeOrder
3055 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003056 }
3057 }
3058 }
3059}
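// [Editor's note, hedged sketch] The algorithm bits iterated in BenchmarkAlgorithms are
// ordinary VmaPoolCreateInfo flags. A minimal example of creating a custom pool that
// uses the linear algorithm; the helper name and the fixed 64 MB block size are
// illustrative assumptions, not part of the benchmark above.
static VmaPool CreateLinearPoolSketch(uint32_t memoryTypeIndex)
{
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memoryTypeIndex;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // Illustrative size.
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    TEST(vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS);
    return pool; // Caller is expected to call vmaDestroyPool(g_hAllocator, pool).
}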
3060
Adam Sawickib8333fb2018-03-13 16:15:53 +01003061static void TestPool_SameSize()
3062{
3063 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3064 const size_t BUF_COUNT = 100;
3065 VkResult res;
3066
3067 RandomNumberGenerator rand{123};
3068
3069 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3070 bufferInfo.size = BUF_SIZE;
3071 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3072
3073 uint32_t memoryTypeBits = UINT32_MAX;
3074 {
3075 VkBuffer dummyBuffer;
3076 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003077 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003078
3079 VkMemoryRequirements memReq;
3080 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3081 memoryTypeBits = memReq.memoryTypeBits;
3082
3083 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3084 }
3085
3086 VmaAllocationCreateInfo poolAllocInfo = {};
3087 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3088 uint32_t memTypeIndex;
3089 res = vmaFindMemoryTypeIndex(
3090 g_hAllocator,
3091 memoryTypeBits,
3092 &poolAllocInfo,
3093 &memTypeIndex);
3094        &memTypeIndex);
    TEST(res == VK_SUCCESS);
3095 VmaPoolCreateInfo poolCreateInfo = {};
3096 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3097 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3098 poolCreateInfo.minBlockCount = 1;
3099 poolCreateInfo.maxBlockCount = 4;
3100 poolCreateInfo.frameInUseCount = 0;
3101
3102 VmaPool pool;
3103 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003104 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003105
3106 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3107
3108 VmaAllocationCreateInfo allocInfo = {};
3109 allocInfo.pool = pool;
3110 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3111 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3112
3113 struct BufItem
3114 {
3115 VkBuffer Buf;
3116 VmaAllocation Alloc;
3117 };
3118 std::vector<BufItem> items;
3119
3120 // Fill entire pool.
3121 for(size_t i = 0; i < BUF_COUNT; ++i)
3122 {
3123 BufItem item;
3124 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003125 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003126 items.push_back(item);
3127 }
3128
3129 // Make sure that another allocation would fail.
3130 {
3131 BufItem item;
3132 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003133 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003134 }
3135
3136 // Validate that no buffer is lost. Also check that they are not mapped.
3137 for(size_t i = 0; i < items.size(); ++i)
3138 {
3139 VmaAllocationInfo allocInfo;
3140 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003141 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3142 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003143 }
3144
3145 // Free some percent of random items.
3146 {
3147 const size_t PERCENT_TO_FREE = 10;
3148 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3149 for(size_t i = 0; i < itemsToFree; ++i)
3150 {
3151 size_t index = (size_t)rand.Generate() % items.size();
3152 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3153 items.erase(items.begin() + index);
3154 }
3155 }
3156
3157 // Randomly allocate and free items.
3158 {
3159 const size_t OPERATION_COUNT = BUF_COUNT;
3160 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3161 {
3162 bool allocate = rand.Generate() % 2 != 0;
3163 if(allocate)
3164 {
3165 if(items.size() < BUF_COUNT)
3166 {
3167 BufItem item;
3168 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003169 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003170 items.push_back(item);
3171 }
3172 }
3173 else // Free
3174 {
3175 if(!items.empty())
3176 {
3177 size_t index = (size_t)rand.Generate() % items.size();
3178 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3179 items.erase(items.begin() + index);
3180 }
3181 }
3182 }
3183 }
3184
3185 // Allocate up to maximum.
3186 while(items.size() < BUF_COUNT)
3187 {
3188 BufItem item;
3189 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003190 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003191 items.push_back(item);
3192 }
3193
3194 // Validate that no buffer is lost.
3195 for(size_t i = 0; i < items.size(); ++i)
3196 {
3197 VmaAllocationInfo allocInfo;
3198 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003199 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003200 }
3201
3202 // Next frame.
3203 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3204
3205 // Allocate another BUF_COUNT buffers.
3206 for(size_t i = 0; i < BUF_COUNT; ++i)
3207 {
3208 BufItem item;
3209 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003210 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003211 items.push_back(item);
3212 }
3213
3214    // Make sure the first BUF_COUNT buffers are lost. Delete them.
3215 for(size_t i = 0; i < BUF_COUNT; ++i)
3216 {
3217 VmaAllocationInfo allocInfo;
3218 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003219 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003220 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3221 }
3222 items.erase(items.begin(), items.begin() + BUF_COUNT);
3223
3224 // Validate that no buffer is lost.
3225 for(size_t i = 0; i < items.size(); ++i)
3226 {
3227 VmaAllocationInfo allocInfo;
3228 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003229 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003230 }
3231
3232 // Free one item.
3233 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3234 items.pop_back();
3235
3236 // Validate statistics.
3237 {
3238 VmaPoolStats poolStats = {};
3239 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003240 TEST(poolStats.allocationCount == items.size());
3241        TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3242 TEST(poolStats.unusedRangeCount == 1);
3243 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3244 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003245 }
3246
3247 // Free all remaining items.
3248 for(size_t i = items.size(); i--; )
3249 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3250 items.clear();
3251
3252 // Allocate maximum items again.
3253 for(size_t i = 0; i < BUF_COUNT; ++i)
3254 {
3255 BufItem item;
3256 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003257 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003258 items.push_back(item);
3259 }
3260
3261 // Delete every other item.
3262 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3263 {
3264 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3265 items.erase(items.begin() + i);
3266 }
3267
3268 // Defragment!
3269 {
3270 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3271 for(size_t i = 0; i < items.size(); ++i)
3272 allocationsToDefragment[i] = items[i].Alloc;
3273
3274 VmaDefragmentationStats defragmentationStats;
3275 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003276 TEST(res == VK_SUCCESS);
3277 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003278 }
3279
3280 // Free all remaining items.
3281 for(size_t i = items.size(); i--; )
3282 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3283 items.clear();
3284
3285 ////////////////////////////////////////////////////////////////////////////////
3286 // Test for vmaMakePoolAllocationsLost
3287
3288 // Allocate 4 buffers on frame 10.
3289 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3290 for(size_t i = 0; i < 4; ++i)
3291 {
3292 BufItem item;
3293 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003294 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003295 items.push_back(item);
3296 }
3297
3298 // Touch first 2 of them on frame 11.
3299 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3300 for(size_t i = 0; i < 2; ++i)
3301 {
3302 VmaAllocationInfo allocInfo;
3303 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3304 }
3305
3306 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
3307 size_t lostCount = 0xDEADC0DE;
3308 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003309 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003310
3311 // Make another call. Now 0 should be lost.
3312 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003313 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003314
3315 // Make another call, with null count. Should not crash.
3316 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3317
3318 // END: Free all remaining items.
3319 for(size_t i = items.size(); i--; )
3320 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3321
3322 items.clear();
3323
Adam Sawickid2924172018-06-11 12:48:46 +02003324 ////////////////////////////////////////////////////////////////////////////////
3325 // Test for allocation too large for pool
3326
3327 {
3328 VmaAllocationCreateInfo allocCreateInfo = {};
3329 allocCreateInfo.pool = pool;
3330
3331 VkMemoryRequirements memReq;
3332 memReq.memoryTypeBits = UINT32_MAX;
3333 memReq.alignment = 1;
3334 memReq.size = poolCreateInfo.blockSize + 4;
3335
3336 VmaAllocation alloc = nullptr;
3337 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003338 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003339 }
3340
Adam Sawickib8333fb2018-03-13 16:15:53 +01003341 vmaDestroyPool(g_hAllocator, pool);
3342}
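// [Editor's note, hedged sketch] TestPool_SameSize repeatedly checks whether an
// allocation created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT has been lost:
// vmaGetAllocationInfo reports deviceMemory == VK_NULL_HANDLE for a lost allocation,
// and querying the info also "touches" a still-alive allocation in the current frame.
// The helper name IsAllocationLost is hypothetical.
static bool IsAllocationLost(VmaAllocation alloc)
{
    VmaAllocationInfo allocInfo = {};
    vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
    return allocInfo.deviceMemory == VK_NULL_HANDLE;
}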
3343
Adam Sawickib0c36362018-11-13 16:17:38 +01003344static void TestResize()
3345{
3346 wprintf(L"Testing vmaResizeAllocation...\n");
3347
3348 const VkDeviceSize KILOBYTE = 1024ull;
3349 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3350
3351 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3352 bufCreateInfo.size = 2 * MEGABYTE;
3353 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3354
3355 VmaAllocationCreateInfo allocCreateInfo = {};
3356 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3357
3358 uint32_t memTypeIndex = UINT32_MAX;
3359 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3360
3361 VmaPoolCreateInfo poolCreateInfo = {};
3362 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3363 poolCreateInfo.blockSize = 8 * MEGABYTE;
3364 poolCreateInfo.minBlockCount = 1;
3365 poolCreateInfo.maxBlockCount = 1;
3366 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3367
3368 VmaPool pool;
3369 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3370
3371 allocCreateInfo.pool = pool;
3372
3373 // Fill 8 MB pool with 4 * 2 MB allocations.
3374 VmaAllocation allocs[4] = {};
3375
3376 VkMemoryRequirements memReq = {};
3377 memReq.memoryTypeBits = UINT32_MAX;
3378 memReq.alignment = 4;
3379 memReq.size = bufCreateInfo.size;
3380
3381 VmaAllocationInfo allocInfo = {};
3382
3383 for(uint32_t i = 0; i < 4; ++i)
3384 {
3385 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3386 }
3387
3388 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3389
3390 // Case: Resize to the same size always succeeds.
3391 {
3392 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3393        vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3394 TEST(allocInfo.size == 2ull * 1024 * 1024);
3395 }
3396
3397 // Case: Shrink allocation at the end.
3398 {
3399 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3400 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3401 TEST(allocInfo.size == 1ull * 1024 * 1024);
3402 }
3403
3404 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3405
3406 // Case: Shrink allocation before free space.
3407 {
3408 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3409 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3410 TEST(allocInfo.size == 512 * KILOBYTE);
3411 }
3412
3413 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3414
3415 // Case: Shrink allocation before next allocation.
3416 {
3417 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3418 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3419 TEST(allocInfo.size == 1 * MEGABYTE);
3420 }
3421
3422 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3423
3424 // Case: Grow allocation while there is even more space available.
3425 {
3426 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3427 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3428 TEST(allocInfo.size == 1 * MEGABYTE);
3429 }
3430
3431 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3432
3433 // Case: Grow allocation while there is exact amount of free space available.
3434 {
3435 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3436 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3437 TEST(allocInfo.size == 2 * MEGABYTE);
3438 }
3439
3440 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3441
3442 // Case: Fail to grow when there is not enough free space due to next allocation.
3443 {
3444 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3445 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3446 TEST(allocInfo.size == 2 * MEGABYTE);
3447 }
3448
3449 // Case: Fail to grow when there is not enough free space due to end of memory block.
3450 {
3451 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3452 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3453 TEST(allocInfo.size == 1 * MEGABYTE);
3454 }
3455
3456 for(uint32_t i = 4; i--; )
3457 {
3458 vmaFreeMemory(g_hAllocator, allocs[i]);
3459 }
3460
3461 vmaDestroyPool(g_hAllocator, pool);
3462
3463 // Test dedicated allocation
3464 {
3465 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3466 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3467 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3468
3469 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3470 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3471
3472 // Case: Resize to the same size always succeeds.
3473 {
3474 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3475 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3476 TEST(allocInfo.size == 2ull * 1024 * 1024);
3477 }
3478
3479 // Case: Shrinking fails.
3480 {
3481 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3482 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3483 TEST(allocInfo.size == 2ull * 1024 * 1024);
3484 }
3485
3486 // Case: Growing fails.
3487 {
3488 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3489 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3490 TEST(allocInfo.size == 2ull * 1024 * 1024);
3491 }
3492
3493 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3494 }
3495}
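// [Editor's note, hedged sketch] Usage pattern for vmaResizeAllocation mirroring the
// cases exercised in TestResize: growing fails with VK_ERROR_OUT_OF_POOL_MEMORY when
// the adjacent space is occupied or the block ends, and the allocation then keeps its
// previous size. The helper name TryResize is hypothetical.
static bool TryResize(VmaAllocation alloc, VkDeviceSize newSize)
{
    if(vmaResizeAllocation(g_hAllocator, alloc, newSize) != VK_SUCCESS)
        return false; // Allocation unchanged; caller may fall back to reallocating.

    VmaAllocationInfo allocInfo = {};
    vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
    TEST(allocInfo.size == newSize);
    return true;
}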
3496
Adam Sawickie44c6262018-06-15 14:30:39 +02003497static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3498{
3499 const uint8_t* pBytes = (const uint8_t*)pMemory;
3500 for(size_t i = 0; i < size; ++i)
3501 {
3502 if(pBytes[i] != pattern)
3503 {
3504 return false;
3505 }
3506 }
3507 return true;
3508}
3509
3510static void TestAllocationsInitialization()
3511{
3512 VkResult res;
3513
3514 const size_t BUF_SIZE = 1024;
3515
3516 // Create pool.
3517
3518 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3519 bufInfo.size = BUF_SIZE;
3520 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3521
3522 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3523 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3524
3525 VmaPoolCreateInfo poolCreateInfo = {};
3526 poolCreateInfo.blockSize = BUF_SIZE * 10;
3527 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3528 poolCreateInfo.maxBlockCount = 1;
3529 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003530 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003531
3532 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3533 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003534 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003535
3536 // Create one persistently mapped buffer to keep memory of this block mapped,
3537 // so that pointer to mapped data will remain (more or less...) valid even
3538 // after destruction of other allocations.
3539
3540 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3541 VkBuffer firstBuf;
3542 VmaAllocation firstAlloc;
3543 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003544 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003545
3546 // Test buffers.
3547
3548 for(uint32_t i = 0; i < 2; ++i)
3549 {
3550 const bool persistentlyMapped = i == 0;
3551 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3552 VkBuffer buf;
3553 VmaAllocation alloc;
3554 VmaAllocationInfo allocInfo;
3555 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003556 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003557
3558 void* pMappedData;
3559 if(!persistentlyMapped)
3560 {
3561 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003562 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003563 }
3564 else
3565 {
3566 pMappedData = allocInfo.pMappedData;
3567 }
3568
3569 // Validate initialized content
3570 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003571 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003572
3573 if(!persistentlyMapped)
3574 {
3575 vmaUnmapMemory(g_hAllocator, alloc);
3576 }
3577
3578 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3579
3580 // Validate freed content
3581 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003582 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003583 }
3584
3585 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3586 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3587}
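// [Editor's note] The 0xDC and 0xEF patterns validated above are the fill values
// written on allocation and on free when the allocator is compiled with
// VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled. Hedged configuration sketch, belonging in
// the translation unit that defines VMA_IMPLEMENTATION rather than here:
//
//     #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"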
3588
Adam Sawickib8333fb2018-03-13 16:15:53 +01003589static void TestPool_Benchmark(
3590 PoolTestResult& outResult,
3591 const PoolTestConfig& config)
3592{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003593 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003594
3595 RandomNumberGenerator mainRand{config.RandSeed};
3596
3597 uint32_t allocationSizeProbabilitySum = std::accumulate(
3598 config.AllocationSizes.begin(),
3599 config.AllocationSizes.end(),
3600 0u,
3601 [](uint32_t sum, const AllocationSize& allocSize) {
3602 return sum + allocSize.Probability;
3603 });
3604
3605 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3606 bufferInfo.size = 256; // Whatever.
3607 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3608
3609 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3610 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3611 imageInfo.extent.width = 256; // Whatever.
3612 imageInfo.extent.height = 256; // Whatever.
3613 imageInfo.extent.depth = 1;
3614 imageInfo.mipLevels = 1;
3615 imageInfo.arrayLayers = 1;
3616 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3617 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3618 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3619 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3620 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3621
3622 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3623 {
3624 VkBuffer dummyBuffer;
3625 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003626 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003627
3628 VkMemoryRequirements memReq;
3629 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3630 bufferMemoryTypeBits = memReq.memoryTypeBits;
3631
3632 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3633 }
3634
3635 uint32_t imageMemoryTypeBits = UINT32_MAX;
3636 {
3637 VkImage dummyImage;
3638 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003639 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003640
3641 VkMemoryRequirements memReq;
3642 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3643 imageMemoryTypeBits = memReq.memoryTypeBits;
3644
3645 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3646 }
3647
3648 uint32_t memoryTypeBits = 0;
3649 if(config.UsesBuffers() && config.UsesImages())
3650 {
3651 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3652 if(memoryTypeBits == 0)
3653 {
3654 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3655 return;
3656 }
3657 }
3658 else if(config.UsesBuffers())
3659 memoryTypeBits = bufferMemoryTypeBits;
3660 else if(config.UsesImages())
3661 memoryTypeBits = imageMemoryTypeBits;
3662 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003663 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003664
3665 VmaPoolCreateInfo poolCreateInfo = {};
3666 poolCreateInfo.memoryTypeIndex = 0;
3667 poolCreateInfo.minBlockCount = 1;
3668 poolCreateInfo.maxBlockCount = 1;
3669 poolCreateInfo.blockSize = config.PoolSize;
3670 poolCreateInfo.frameInUseCount = 1;
3671
3672 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3673 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3674 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3675
3676 VmaPool pool;
3677 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003678 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003679
3680 // Start time measurement - after creating pool and initializing data structures.
3681 time_point timeBeg = std::chrono::high_resolution_clock::now();
3682
3683 ////////////////////////////////////////////////////////////////////////////////
3684 // ThreadProc
3685 auto ThreadProc = [&](
3686 PoolTestThreadResult* outThreadResult,
3687 uint32_t randSeed,
3688 HANDLE frameStartEvent,
3689 HANDLE frameEndEvent) -> void
3690 {
3691 RandomNumberGenerator threadRand{randSeed};
3692
3693 outThreadResult->AllocationTimeMin = duration::max();
3694 outThreadResult->AllocationTimeSum = duration::zero();
3695 outThreadResult->AllocationTimeMax = duration::min();
3696 outThreadResult->DeallocationTimeMin = duration::max();
3697 outThreadResult->DeallocationTimeSum = duration::zero();
3698 outThreadResult->DeallocationTimeMax = duration::min();
3699 outThreadResult->AllocationCount = 0;
3700 outThreadResult->DeallocationCount = 0;
3701 outThreadResult->LostAllocationCount = 0;
3702 outThreadResult->LostAllocationTotalSize = 0;
3703 outThreadResult->FailedAllocationCount = 0;
3704 outThreadResult->FailedAllocationTotalSize = 0;
3705
3706 struct Item
3707 {
3708 VkDeviceSize BufferSize;
3709 VkExtent2D ImageSize;
3710 VkBuffer Buf;
3711 VkImage Image;
3712 VmaAllocation Alloc;
3713
3714 VkDeviceSize CalcSizeBytes() const
3715 {
3716 return BufferSize +
3717 ImageSize.width * ImageSize.height * 4;
3718 }
3719 };
3720 std::vector<Item> unusedItems, usedItems;
3721
3722 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3723
3724 // Create all items - all unused, not yet allocated.
3725 for(size_t i = 0; i < threadTotalItemCount; ++i)
3726 {
3727 Item item = {};
3728
3729 uint32_t allocSizeIndex = 0;
3730 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3731 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3732 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3733
3734 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3735 if(allocSize.BufferSizeMax > 0)
3736 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003737 TEST(allocSize.BufferSizeMin > 0);
3738 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003739 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3740 item.BufferSize = allocSize.BufferSizeMin;
3741 else
3742 {
3743 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3744 item.BufferSize = item.BufferSize / 16 * 16;
3745 }
3746 }
3747 else
3748 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003749 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003750 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3751 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3752 else
3753 {
3754 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3755 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3756 }
3757 }
3758
3759 unusedItems.push_back(item);
3760 }
3761
3762 auto Allocate = [&](Item& item) -> VkResult
3763 {
3764 VmaAllocationCreateInfo allocCreateInfo = {};
3765 allocCreateInfo.pool = pool;
3766 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3767 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3768
3769 if(item.BufferSize)
3770 {
3771 bufferInfo.size = item.BufferSize;
3772 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3773 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3774 }
3775 else
3776 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003777 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003778
3779 imageInfo.extent.width = item.ImageSize.width;
3780 imageInfo.extent.height = item.ImageSize.height;
3781 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3782 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3783 }
3784 };
3785
3786 ////////////////////////////////////////////////////////////////////////////////
3787 // Frames
3788 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3789 {
3790 WaitForSingleObject(frameStartEvent, INFINITE);
3791
3792 // Always make some percent of used bufs unused, to choose different used ones.
3793 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3794 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3795 {
3796 size_t index = threadRand.Generate() % usedItems.size();
3797 unusedItems.push_back(usedItems[index]);
3798 usedItems.erase(usedItems.begin() + index);
3799 }
3800
3801 // Determine which bufs we want to use in this frame.
3802 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3803 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003804 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003805 // Move some used to unused.
3806 while(usedBufCount < usedItems.size())
3807 {
3808 size_t index = threadRand.Generate() % usedItems.size();
3809 unusedItems.push_back(usedItems[index]);
3810 usedItems.erase(usedItems.begin() + index);
3811 }
3812 // Move some unused to used.
3813 while(usedBufCount > usedItems.size())
3814 {
3815 size_t index = threadRand.Generate() % unusedItems.size();
3816 usedItems.push_back(unusedItems[index]);
3817 unusedItems.erase(unusedItems.begin() + index);
3818 }
3819
3820 uint32_t touchExistingCount = 0;
3821 uint32_t touchLostCount = 0;
3822 uint32_t createSucceededCount = 0;
3823 uint32_t createFailedCount = 0;
3824
3825 // Touch all used bufs. If not created or lost, allocate.
3826 for(size_t i = 0; i < usedItems.size(); ++i)
3827 {
3828 Item& item = usedItems[i];
3829 // Not yet created.
3830 if(item.Alloc == VK_NULL_HANDLE)
3831 {
3832 res = Allocate(item);
3833 ++outThreadResult->AllocationCount;
3834 if(res != VK_SUCCESS)
3835 {
3836 item.Alloc = VK_NULL_HANDLE;
3837 item.Buf = VK_NULL_HANDLE;
3838 ++outThreadResult->FailedAllocationCount;
3839 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3840 ++createFailedCount;
3841 }
3842 else
3843 ++createSucceededCount;
3844 }
3845 else
3846 {
3847 // Touch.
3848 VmaAllocationInfo allocInfo;
3849 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3850 // Lost.
3851 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3852 {
3853 ++touchLostCount;
3854
3855 // Destroy.
3856 {
3857 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3858 if(item.Buf)
3859 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3860 else
3861 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3862 ++outThreadResult->DeallocationCount;
3863 }
3864 item.Alloc = VK_NULL_HANDLE;
3865 item.Buf = VK_NULL_HANDLE;
3866
3867 ++outThreadResult->LostAllocationCount;
3868 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3869
3870 // Recreate.
3871 res = Allocate(item);
3872 ++outThreadResult->AllocationCount;
3873 // Creation failed.
3874 if(res != VK_SUCCESS)
3875 {
3876 ++outThreadResult->FailedAllocationCount;
3877 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3878 ++createFailedCount;
3879 }
3880 else
3881 ++createSucceededCount;
3882 }
3883 else
3884 ++touchExistingCount;
3885 }
3886 }
3887
3888 /*
3889 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3890 randSeed, frameIndex,
3891 touchExistingCount, touchLostCount,
3892 createSucceededCount, createFailedCount);
3893 */
3894
3895 SetEvent(frameEndEvent);
3896 }
3897
3898 // Free all remaining items.
3899 for(size_t i = usedItems.size(); i--; )
3900 {
3901 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3902 if(usedItems[i].Buf)
3903 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3904 else
3905 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3906 ++outThreadResult->DeallocationCount;
3907 }
3908 for(size_t i = unusedItems.size(); i--; )
3909 {
3910            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3911 if(unusedItems[i].Buf)
3912 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3913 else
3914 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3915 ++outThreadResult->DeallocationCount;
3916 }
3917 };
3918
3919 // Launch threads.
3920 uint32_t threadRandSeed = mainRand.Generate();
3921 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3922 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3923 std::vector<std::thread> bkgThreads;
3924 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3925 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3926 {
3927 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3928 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3929 bkgThreads.emplace_back(std::bind(
3930 ThreadProc,
3931 &threadResults[threadIndex],
3932 threadRandSeed + threadIndex,
3933 frameStartEvents[threadIndex],
3934 frameEndEvents[threadIndex]));
3935 }
3936
3937 // Execute frames.
Adam Sawickib8d34d52018-10-03 17:41:20 +02003938 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003939 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3940 {
3941 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3942 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3943 SetEvent(frameStartEvents[threadIndex]);
3944 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3945 }
3946
3947 // Wait for threads finished
3948 for(size_t i = 0; i < bkgThreads.size(); ++i)
3949 {
3950 bkgThreads[i].join();
3951 CloseHandle(frameEndEvents[i]);
3952 CloseHandle(frameStartEvents[i]);
3953 }
3954 bkgThreads.clear();
3955
3956 // Finish time measurement - before destroying pool.
3957 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3958
3959 vmaDestroyPool(g_hAllocator, pool);
3960
3961 outResult.AllocationTimeMin = duration::max();
3962 outResult.AllocationTimeAvg = duration::zero();
3963 outResult.AllocationTimeMax = duration::min();
3964 outResult.DeallocationTimeMin = duration::max();
3965 outResult.DeallocationTimeAvg = duration::zero();
3966 outResult.DeallocationTimeMax = duration::min();
3967 outResult.LostAllocationCount = 0;
3968 outResult.LostAllocationTotalSize = 0;
3969 outResult.FailedAllocationCount = 0;
3970 outResult.FailedAllocationTotalSize = 0;
3971 size_t allocationCount = 0;
3972 size_t deallocationCount = 0;
3973 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3974 {
3975 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3976 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3977 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3978 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3979 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3980 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3981 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3982 allocationCount += threadResult.AllocationCount;
3983 deallocationCount += threadResult.DeallocationCount;
3984 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3985 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3986 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3987 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3988 }
3989 if(allocationCount)
3990 outResult.AllocationTimeAvg /= allocationCount;
3991 if(deallocationCount)
3992 outResult.DeallocationTimeAvg /= deallocationCount;
3993}
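// [Editor's note, hedged sketch] The per-frame handshake in TestPool_Benchmark uses
// auto-reset Win32 events (CreateEvent with bManualReset = FALSE): the main thread
// signals every worker's "frame start" event, then blocks until all workers signal
// their "frame end" events; each worker does the mirror image in its frame loop.
// The function name below is illustrative.
static void MainThreadRunOneFrameSketch(
    uint32_t frameIndex,
    const std::vector<HANDLE>& frameStartEvents,
    const std::vector<HANDLE>& frameEndEvents)
{
    vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
    for(size_t i = 0; i < frameStartEvents.size(); ++i)
        SetEvent(frameStartEvents[i]);
    // Worker side, for reference: WaitForSingleObject(frameStartEvent, INFINITE);
    // ...per-frame allocations/frees... SetEvent(frameEndEvent);
    WaitForMultipleObjects((DWORD)frameEndEvents.size(), frameEndEvents.data(), TRUE, INFINITE);
}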
3994
3995static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3996{
3997 if(ptr1 < ptr2)
3998 return ptr1 + size1 > ptr2;
3999 else if(ptr2 < ptr1)
4000 return ptr2 + size2 > ptr1;
4001 else
4002 return true;
4003}
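// [Editor's note] Illustrative example for MemoryRegionsOverlap: with ptr1 = 0x1000 and
// size1 = 0x100, a region starting at ptr2 = 0x10FF overlaps (0x1000 + 0x100 > 0x10FF),
// while one starting at ptr2 = 0x1100 does not. Equal pointers always count as
// overlapping, regardless of sizes.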
4004
4005static void TestMapping()
4006{
4007 wprintf(L"Testing mapping...\n");
4008
4009 VkResult res;
4010 uint32_t memTypeIndex = UINT32_MAX;
4011
4012 enum TEST
4013 {
4014 TEST_NORMAL,
4015 TEST_POOL,
4016 TEST_DEDICATED,
4017 TEST_COUNT
4018 };
4019 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4020 {
4021 VmaPool pool = nullptr;
4022 if(testIndex == TEST_POOL)
4023 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004024 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004025 VmaPoolCreateInfo poolInfo = {};
4026 poolInfo.memoryTypeIndex = memTypeIndex;
4027 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004028 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004029 }
4030
4031 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4032 bufInfo.size = 0x10000;
4033 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4034
4035 VmaAllocationCreateInfo allocCreateInfo = {};
4036 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4037 allocCreateInfo.pool = pool;
4038 if(testIndex == TEST_DEDICATED)
4039 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4040
4041 VmaAllocationInfo allocInfo;
4042
4043 // Mapped manually
4044
4045 // Create 2 buffers.
4046 BufferInfo bufferInfos[3];
4047 for(size_t i = 0; i < 2; ++i)
4048 {
4049 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4050 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004051 TEST(res == VK_SUCCESS);
4052 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004053 memTypeIndex = allocInfo.memoryType;
4054 }
4055
4056 // Map buffer 0.
4057 char* data00 = nullptr;
4058 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004059 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004060 data00[0xFFFF] = data00[0];
4061
4062 // Map buffer 0 second time.
4063 char* data01 = nullptr;
4064 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004065 TEST(res == VK_SUCCESS && data01 == data00);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004066
4067 // Map buffer 1.
4068 char* data1 = nullptr;
4069 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004070 TEST(res == VK_SUCCESS && data1 != nullptr);
4071 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004072 data1[0xFFFF] = data1[0];
4073
4074 // Unmap buffer 0 two times.
4075 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4076 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4077 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004078 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004079
4080 // Unmap buffer 1.
4081 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4082 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004083 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004084
4085 // Create 3rd buffer - persistently mapped.
4086 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4087 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4088 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004089 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004090
4091 // Map buffer 2.
4092 char* data2 = nullptr;
4093 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004094 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004095 data2[0xFFFF] = data2[0];
4096
4097 // Unmap buffer 2.
4098 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4099 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004100 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004101
4102 // Destroy all buffers.
4103 for(size_t i = 3; i--; )
4104 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4105
4106 vmaDestroyPool(g_hAllocator, pool);
4107 }
4108}
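// [Editor's note, hedged sketch] The persistent-mapping path exercised in TestMapping:
// with VMA_ALLOCATION_CREATE_MAPPED_BIT the allocation stays mapped for its whole
// lifetime and VmaAllocationInfo::pMappedData can be used directly, with no
// vmaMapMemory/vmaUnmapMemory calls. The helper name is illustrative.
static void CreatePersistentlyMappedBufferSketch(
    const VkBufferCreateInfo& bufCreateInfo,
    VkBuffer& outBuf, VmaAllocation& outAlloc, void*& outMappedData)
{
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo allocInfo = {};
    TEST(vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &outBuf, &outAlloc, &allocInfo) == VK_SUCCESS);
    outMappedData = allocInfo.pMappedData; // Remains valid until vmaDestroyBuffer.
    TEST(outMappedData != nullptr);
}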
4109
4110static void TestMappingMultithreaded()
4111{
4112 wprintf(L"Testing mapping multithreaded...\n");
4113
4114 static const uint32_t threadCount = 16;
4115 static const uint32_t bufferCount = 1024;
4116 static const uint32_t threadBufferCount = bufferCount / threadCount;
4117
4118 VkResult res;
4119 volatile uint32_t memTypeIndex = UINT32_MAX;
4120
4121 enum TEST
4122 {
4123 TEST_NORMAL,
4124 TEST_POOL,
4125 TEST_DEDICATED,
4126 TEST_COUNT
4127 };
4128 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4129 {
4130 VmaPool pool = nullptr;
4131 if(testIndex == TEST_POOL)
4132 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004133 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004134 VmaPoolCreateInfo poolInfo = {};
4135 poolInfo.memoryTypeIndex = memTypeIndex;
4136 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004137 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004138 }
4139
4140 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4141 bufCreateInfo.size = 0x10000;
4142 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4143
4144 VmaAllocationCreateInfo allocCreateInfo = {};
4145 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4146 allocCreateInfo.pool = pool;
4147 if(testIndex == TEST_DEDICATED)
4148 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4149
4150 std::thread threads[threadCount];
4151 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4152 {
4153 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4154 // ======== THREAD FUNCTION ========
4155
4156 RandomNumberGenerator rand{threadIndex};
4157
4158 enum class MODE
4159 {
4160 // Don't map this buffer at all.
4161 DONT_MAP,
4162 // Map and quickly unmap.
4163 MAP_FOR_MOMENT,
4164 // Map and unmap before destruction.
4165 MAP_FOR_LONGER,
4166 // Map two times. Quickly unmap, second unmap before destruction.
4167 MAP_TWO_TIMES,
4168 // Create this buffer as persistently mapped.
4169 PERSISTENTLY_MAPPED,
4170 COUNT
4171 };
4172 std::vector<BufferInfo> bufInfos{threadBufferCount};
4173 std::vector<MODE> bufModes{threadBufferCount};
4174
4175 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4176 {
4177 BufferInfo& bufInfo = bufInfos[bufferIndex];
4178 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4179 bufModes[bufferIndex] = mode;
4180
4181 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4182 if(mode == MODE::PERSISTENTLY_MAPPED)
4183 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4184
4185 VmaAllocationInfo allocInfo;
4186 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4187 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004188 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004189
4190 if(memTypeIndex == UINT32_MAX)
4191 memTypeIndex = allocInfo.memoryType;
4192
4193 char* data = nullptr;
4194
4195 if(mode == MODE::PERSISTENTLY_MAPPED)
4196 {
4197 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004198 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004199 }
4200 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4201 mode == MODE::MAP_TWO_TIMES)
4202 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004203 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004204 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004205 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004206
4207 if(mode == MODE::MAP_TWO_TIMES)
4208 {
4209 char* data2 = nullptr;
4210 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004211 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004212 }
4213 }
4214 else if(mode == MODE::DONT_MAP)
4215 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004216 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004217 }
4218 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004219 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004220
4221                    // Test that reading from and writing to the beginning and end of mapped memory don't crash.
4222 if(data)
4223 data[0xFFFF] = data[0];
4224
4225 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4226 {
4227 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4228
4229 VmaAllocationInfo allocInfo;
4230 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4231 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004232 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004233 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004234 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004235 }
4236
4237 switch(rand.Generate() % 3)
4238 {
4239 case 0: Sleep(0); break; // Yield.
4240 case 1: Sleep(10); break; // 10 ms
4241 // default: No sleep.
4242 }
4243
4244                    // Test that reading from and writing to the beginning and end of mapped memory don't crash.
4245 if(data)
4246 data[0xFFFF] = data[0];
4247 }
4248
4249 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4250 {
4251 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4252 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4253 {
4254 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4255
4256 VmaAllocationInfo allocInfo;
4257 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004258 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004259 }
4260
4261 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4262 }
4263 });
4264 }
4265
4266 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4267 threads[threadIndex].join();
4268
4269 vmaDestroyPool(g_hAllocator, pool);
4270 }
4271}
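// [Editor's note, hedged sketch] vmaMapMemory/vmaUnmapMemory are reference-counted per
// allocation, which is what the MAP_TWO_TIMES mode above relies on, so every map must
// be balanced by exactly one unmap. A small RAII wrapper enforcing that balance; the
// class name ScopedMap is hypothetical.
class ScopedMap
{
public:
    explicit ScopedMap(VmaAllocation alloc) : m_Alloc(alloc), m_Data(nullptr)
    {
        TEST(vmaMapMemory(g_hAllocator, m_Alloc, &m_Data) == VK_SUCCESS);
    }
    ~ScopedMap() { vmaUnmapMemory(g_hAllocator, m_Alloc); }
    void* Data() const { return m_Data; }
    ScopedMap(const ScopedMap&) = delete;
    ScopedMap& operator=(const ScopedMap&) = delete;
private:
    VmaAllocation m_Alloc;
    void* m_Data;
};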
4272
4273static void WriteMainTestResultHeader(FILE* file)
4274{
4275 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004276 "Code,Time,"
4277 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004278 "Total Time (us),"
4279 "Allocation Time Min (us),"
4280 "Allocation Time Avg (us),"
4281 "Allocation Time Max (us),"
4282 "Deallocation Time Min (us),"
4283 "Deallocation Time Avg (us),"
4284 "Deallocation Time Max (us),"
4285 "Total Memory Allocated (B),"
4286 "Free Range Size Avg (B),"
4287 "Free Range Size Max (B)\n");
4288}
4289
4290static void WriteMainTestResult(
4291 FILE* file,
4292 const char* codeDescription,
4293 const char* testDescription,
4294 const Config& config, const Result& result)
4295{
4296 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4297 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4298 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4299 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4300 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4301 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4302 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4303
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004304 std::string currTime;
4305 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004306
4307 fprintf(file,
4308 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004309 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4310 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004311 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004312 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004313 totalTimeSeconds * 1e6f,
4314 allocationTimeMinSeconds * 1e6f,
4315 allocationTimeAvgSeconds * 1e6f,
4316 allocationTimeMaxSeconds * 1e6f,
4317 deallocationTimeMinSeconds * 1e6f,
4318 deallocationTimeAvgSeconds * 1e6f,
4319 deallocationTimeMaxSeconds * 1e6f,
4320 result.TotalMemoryAllocated,
4321 result.FreeRangeSizeAvg,
4322 result.FreeRangeSizeMax);
4323}
4324
4325static void WritePoolTestResultHeader(FILE* file)
4326{
4327 fprintf(file,
4328 "Code,Test,Time,"
4329 "Config,"
4330 "Total Time (us),"
4331 "Allocation Time Min (us),"
4332 "Allocation Time Avg (us),"
4333 "Allocation Time Max (us),"
4334 "Deallocation Time Min (us),"
4335 "Deallocation Time Avg (us),"
4336 "Deallocation Time Max (us),"
4337 "Lost Allocation Count,"
4338 "Lost Allocation Total Size (B),"
4339 "Failed Allocation Count,"
4340 "Failed Allocation Total Size (B)\n");
4341}
4342
4343static void WritePoolTestResult(
4344 FILE* file,
4345 const char* codeDescription,
4346 const char* testDescription,
4347 const PoolTestConfig& config,
4348 const PoolTestResult& result)
4349{
4350 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4351 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4352 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4353 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4354 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4355 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4356 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4357
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004358 std::string currTime;
4359 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004360
4361 fprintf(file,
4362 "%s,%s,%s,"
4363 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4364 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4365 // General
4366 codeDescription,
4367 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004368 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004369 // Config
4370 config.ThreadCount,
4371 (unsigned long long)config.PoolSize,
4372 config.FrameCount,
4373 config.TotalItemCount,
4374 config.UsedItemCountMin,
4375 config.UsedItemCountMax,
4376 config.ItemsToMakeUnusedPercent,
4377 // Results
4378 totalTimeSeconds * 1e6f,
4379 allocationTimeMinSeconds * 1e6f,
4380 allocationTimeAvgSeconds * 1e6f,
4381 allocationTimeMaxSeconds * 1e6f,
4382 deallocationTimeMinSeconds * 1e6f,
4383 deallocationTimeAvgSeconds * 1e6f,
4384 deallocationTimeMaxSeconds * 1e6f,
4385 result.LostAllocationCount,
4386 result.LostAllocationTotalSize,
4387 result.FailedAllocationCount,
4388 result.FailedAllocationTotalSize);
4389}
4390
4391static void PerformCustomMainTest(FILE* file)
4392{
4393 Config config{};
4394 config.RandSeed = 65735476;
4395 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4396 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4397 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4398 config.FreeOrder = FREE_ORDER::FORWARD;
4399 config.ThreadCount = 16;
4400 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004401 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004402
4403 // Buffers
4404 //config.AllocationSizes.push_back({4, 16, 1024});
4405 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4406
4407 // Images
4408 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4409 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4410
4411 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4412 config.AdditionalOperationCount = 1024;
4413
4414 Result result{};
4415 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004416 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004417 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4418}
4419
4420static void PerformCustomPoolTest(FILE* file)
4421{
4422 PoolTestConfig config;
4423 config.PoolSize = 100 * 1024 * 1024; // Initial value only - overridden below once CalcAvgResourceSize() is known.
4424 config.RandSeed = 2345764;
4425 config.ThreadCount = 1;
4426 config.FrameCount = 200;
4427 config.ItemsToMakeUnusedPercent = 2;
4428
4429 AllocationSize allocSize = {};
4430 allocSize.BufferSizeMin = 1024;
4431 allocSize.BufferSizeMax = 1024 * 1024;
4432 allocSize.Probability = 1;
4433 config.AllocationSizes.push_back(allocSize);
4434
4435 allocSize.BufferSizeMin = 0;
4436 allocSize.BufferSizeMax = 0;
4437 allocSize.ImageSizeMin = 128;
4438 allocSize.ImageSizeMax = 1024;
4439 allocSize.Probability = 1;
4440 config.AllocationSizes.push_back(allocSize);
4441
4442 config.PoolSize = config.CalcAvgResourceSize() * 200;
4443 config.UsedItemCountMax = 160;
4444 config.TotalItemCount = config.UsedItemCountMax * 10;
4445 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4446
4447 g_MemoryAliasingWarningEnabled = false;
4448 PoolTestResult result = {};
4449 TestPool_Benchmark(result, config);
4450 g_MemoryAliasingWarningEnabled = true;
4451
4452 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4453}
4454
Adam Sawickib8333fb2018-03-13 16:15:53 +01004455static void PerformMainTests(FILE* file)
4456{
4457 uint32_t repeatCount = 1;
4458 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4459
4460 Config config{};
4461 config.RandSeed = 65735476;
4462 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4463 config.FreeOrder = FREE_ORDER::FORWARD;
4464
4465 size_t threadCountCount = 1;
4466 switch(ConfigType)
4467 {
4468 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4469 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4470 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4471 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4472 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4473 default: assert(0);
4474 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004475
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004476 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004477
Adam Sawickib8333fb2018-03-13 16:15:53 +01004478 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4479 {
4480 std::string desc1;
4481
4482 switch(threadCountIndex)
4483 {
4484 case 0:
4485 desc1 += "1_thread";
4486 config.ThreadCount = 1;
4487 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4488 break;
4489 case 1:
4490 desc1 += "16_threads+0%_common";
4491 config.ThreadCount = 16;
4492 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4493 break;
4494 case 2:
4495 desc1 += "16_threads+50%_common";
4496 config.ThreadCount = 16;
4497 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4498 break;
4499 case 3:
4500 desc1 += "16_threads+100%_common";
4501 config.ThreadCount = 16;
4502 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4503 break;
4504 case 4:
4505 desc1 += "2_threads+0%_common";
4506 config.ThreadCount = 2;
4507 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4508 break;
4509 case 5:
4510 desc1 += "2_threads+50%_common";
4511 config.ThreadCount = 2;
4512 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4513 break;
4514 case 6:
4515 desc1 += "2_threads+100%_common";
4516 config.ThreadCount = 2;
4517 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4518 break;
4519 default:
4520 assert(0);
4521 }
4522
4523 // 0 = buffers, 1 = images, 2 = buffers and images
4524 size_t buffersVsImagesCount = 2;
4525 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4526 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4527 {
4528 std::string desc2 = desc1;
4529 switch(buffersVsImagesIndex)
4530 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004531 case 0: desc2 += ",Buffers"; break;
4532 case 1: desc2 += ",Images"; break;
4533 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004534 default: assert(0);
4535 }
4536
4537 // 0 = small, 1 = large, 2 = small and large
4538 size_t smallVsLargeCount = 2;
4539 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4540 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4541 {
4542 std::string desc3 = desc2;
4543 switch(smallVsLargeIndex)
4544 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004545 case 0: desc3 += ",Small"; break;
4546 case 1: desc3 += ",Large"; break;
4547 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004548 default: assert(0);
4549 }
4550
4551 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4552 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4553 else
4554 config.MaxBytesToAllocate = 4ull * 1024 * 1024;
4555
4556 // 0 = varying sizes min...max, 1 = set of constant sizes
4557 size_t constantSizesCount = 1;
4558 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4559 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4560 {
4561 std::string desc4 = desc3;
4562 switch(constantSizesIndex)
4563 {
4564 case 0: desc4 += " Varying_sizes"; break; // Leading space, not a comma - appended to the same "Sizes" CSV column as desc3.
4565 case 1: desc4 += " Constant_sizes"; break;
4566 default: assert(0);
4567 }
4568
4569 config.AllocationSizes.clear();
4570 // Buffers present
4571 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4572 {
4573 // Small
4574 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4575 {
4576 // Varying size
4577 if(constantSizesIndex == 0)
4578 config.AllocationSizes.push_back({4, 16, 1024});
4579 // Constant sizes
4580 else
4581 {
4582 config.AllocationSizes.push_back({1, 16, 16});
4583 config.AllocationSizes.push_back({1, 64, 64});
4584 config.AllocationSizes.push_back({1, 256, 256});
4585 config.AllocationSizes.push_back({1, 1024, 1024});
4586 }
4587 }
4588 // Large
4589 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4590 {
4591 // Varying size
4592 if(constantSizesIndex == 0)
4593 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4594 // Constant sizes
4595 else
4596 {
4597 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4598 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4599 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4600 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4601 }
4602 }
4603 }
4604 // Images present
4605 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4606 {
4607 // Small
4608 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4609 {
4610 // Varying size
4611 if(constantSizesIndex == 0)
4612 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4613 // Constant sizes
4614 else
4615 {
4616 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4617 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4618 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4619 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4620 }
4621 }
4622 // Large
4623 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4624 {
4625 // Varying size
4626 if(constantSizesIndex == 0)
4627 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4628 // Constant sizes
4629 else
4630 {
4631 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4632 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4633 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4634 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4635 }
4636 }
4637 }
4638
4639 // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95% of MaxBytesToAllocate up front, each followed by many additional operations
4640 size_t beginBytesToAllocateCount = 1;
4641 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4642 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4643 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4644 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4645 {
4646 std::string desc5 = desc4;
4647
4648 switch(beginBytesToAllocateIndex)
4649 {
4650 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004651 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004652 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4653 config.AdditionalOperationCount = 0;
4654 break;
4655 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004656 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004657 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4658 config.AdditionalOperationCount = 1024;
4659 break;
4660 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004661 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004662 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4663 config.AdditionalOperationCount = 1024;
4664 break;
4665 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004666 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004667 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4668 config.AdditionalOperationCount = 1024;
4669 break;
4670 default:
4671 assert(0);
4672 }
4673
Adam Sawicki0667e332018-08-24 17:26:44 +02004674 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004675 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004676 std::string desc6 = desc5;
4677 switch(strategyIndex)
4678 {
4679 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004680 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004681 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4682 break;
4683 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004684 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004685 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4686 break;
4687 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004688 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004689 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4690 break;
4691 default:
4692 assert(0);
4693 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004694
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004695 desc6 += ',';
4696 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004697
4698 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004699
4700 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4701 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004702 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004703
4704 Result result{};
4705 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004706 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004707 if(file)
4708 {
4709 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4710 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004711 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004712 }
4713 }
4714 }
4715 }
4716 }
4717 }
4718}
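// With the default CONFIG_TYPE_SMALL, the nested loops above enumerate
// 2 (thread setups) * 2 (buffers/images) * 2 (small/large) * 2 (varying/constant sizes) *
// 2 (begin-bytes modes) = 32 configurations, each benchmarked once per allocation strategy
// reported by GetAllocationStrategyCount().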
4719
4720static void PerformPoolTests(FILE* file)
4721{
4722 const size_t AVG_RESOURCES_PER_POOL = 300;
4723
4724 uint32_t repeatCount = 1;
4725 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4726
4727 PoolTestConfig config{};
4728 config.RandSeed = 2346343;
4729 config.FrameCount = 200;
4730 config.ItemsToMakeUnusedPercent = 2;
4731
4732 size_t threadCountCount = 1;
4733 switch(ConfigType)
4734 {
4735 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4736 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4737 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4738 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4739 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4740 default: assert(0);
4741 }
4742 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4743 {
4744 std::string desc1;
4745
4746 switch(threadCountIndex)
4747 {
4748 case 0:
4749 desc1 += "1_thread";
4750 config.ThreadCount = 1;
4751 break;
4752 case 1:
4753 desc1 += "16_threads";
4754 config.ThreadCount = 16;
4755 break;
4756 case 2:
4757 desc1 += "2_threads";
4758 config.ThreadCount = 2;
4759 break;
4760 default:
4761 assert(0);
4762 }
4763
4764 // 0 = buffers, 1 = images, 2 = buffers and images
4765 size_t buffersVsImagesCount = 2;
4766 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4767 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4768 {
4769 std::string desc2 = desc1;
4770 switch(buffersVsImagesIndex)
4771 {
4772 case 0: desc2 += " Buffers"; break;
4773 case 1: desc2 += " Images"; break;
4774 case 2: desc2 += " Buffers+Images"; break;
4775 default: assert(0);
4776 }
4777
4778 // 0 = small, 1 = large, 2 = small and large
4779 size_t smallVsLargeCount = 2;
4780 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4781 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4782 {
4783 std::string desc3 = desc2;
4784 switch(smallVsLargeIndex)
4785 {
4786 case 0: desc3 += " Small"; break;
4787 case 1: desc3 += " Large"; break;
4788 case 2: desc3 += " Small+Large"; break;
4789 default: assert(0);
4790 }
4791
4792 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4793 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4794 else
4795 config.PoolSize = 4ull * 1024 * 1024; // 4 MB
4796
4797 // 0 = varying sizes min...max, 1 = set of constant sizes
4798 size_t constantSizesCount = 1;
4799 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4800 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4801 {
4802 std::string desc4 = desc3;
4803 switch(constantSizesIndex)
4804 {
4805 case 0: desc4 += " Varying_sizes"; break;
4806 case 1: desc4 += " Constant_sizes"; break;
4807 default: assert(0);
4808 }
4809
4810 config.AllocationSizes.clear();
4811 // Buffers present
4812 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4813 {
4814 // Small
4815 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4816 {
4817 // Varying size
4818 if(constantSizesIndex == 0)
4819 config.AllocationSizes.push_back({4, 16, 1024});
4820 // Constant sizes
4821 else
4822 {
4823 config.AllocationSizes.push_back({1, 16, 16});
4824 config.AllocationSizes.push_back({1, 64, 64});
4825 config.AllocationSizes.push_back({1, 256, 256});
4826 config.AllocationSizes.push_back({1, 1024, 1024});
4827 }
4828 }
4829 // Large
4830 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4831 {
4832 // Varying size
4833 if(constantSizesIndex == 0)
4834 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4835 // Constant sizes
4836 else
4837 {
4838 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4839 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4840 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4841 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4842 }
4843 }
4844 }
4845 // Images present
4846 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4847 {
4848 // Small
4849 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4850 {
4851 // Varying size
4852 if(constantSizesIndex == 0)
4853 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4854 // Constant sizes
4855 else
4856 {
4857 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4858 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4859 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4860 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4861 }
4862 }
4863 // Large
4864 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4865 {
4866 // Varying size
4867 if(constantSizesIndex == 0)
4868 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4869 // Constant sizes
4870 else
4871 {
4872 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4873 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4874 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4875 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4876 }
4877 }
4878 }
4879
4880 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4881 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4882
4883 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4884 size_t subscriptionModeCount;
4885 switch(ConfigType)
4886 {
4887 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4888 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4889 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4890 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4891 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4892 default: assert(0);
4893 }
4894 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4895 {
4896 std::string desc5 = desc4;
4897
4898 switch(subscriptionModeIndex)
4899 {
4900 case 0:
4901 desc5 += " Subscription_66%";
4902 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4903 break;
4904 case 1:
4905 desc5 += " Subscription_133%";
4906 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4907 break;
4908 case 2:
4909 desc5 += " Subscription_100%";
4910 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4911 break;
4912 case 3:
4913 desc5 += " Subscription_33%";
4914 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4915 break;
4916 case 4:
4917 desc5 += " Subscription_166%";
4918 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4919 break;
4920 default:
4921 assert(0);
4922 }
4923
4924 config.TotalItemCount = config.UsedItemCountMax * 5;
4925 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4926
4927 const char* testDescription = desc5.c_str();
4928
4929 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4930 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004931 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004932
4933 PoolTestResult result{};
4934 g_MemoryAliasingWarningEnabled = false;
4935 TestPool_Benchmark(result, config);
4936 g_MemoryAliasingWarningEnabled = true;
4937 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4938 }
4939 }
4940 }
4941 }
4942 }
4943 }
4944}
4945
Adam Sawickia83793a2018-09-03 13:40:42 +02004946static void BasicTestBuddyAllocator()
4947{
4948 wprintf(L"Basic test buddy allocator\n");
4949
4950 RandomNumberGenerator rand{76543};
4951
4952 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4953 sampleBufCreateInfo.size = 1024; // Size doesn't matter here - this sample buffer is only used to find a memory type index.
4954 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4955
4956 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4957 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4958
4959 VmaPoolCreateInfo poolCreateInfo = {};
4960 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004961 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004962
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004963 // Deliberately adding 1023 to test usable size smaller than memory block size.
4964 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004965 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004966 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004967
4968 VmaPool pool = nullptr;
4969 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004970 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004971
4972 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4973
4974 VmaAllocationCreateInfo allocCreateInfo = {};
4975 allocCreateInfo.pool = pool;
4976
4977 std::vector<BufferInfo> bufInfo;
4978 BufferInfo newBufInfo;
4979 VmaAllocationInfo allocInfo;
4980
4981 bufCreateInfo.size = 1024 * 256;
4982 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4983 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004984 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004985 bufInfo.push_back(newBufInfo);
4986
4987 bufCreateInfo.size = 1024 * 512;
4988 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4989 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004990 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004991 bufInfo.push_back(newBufInfo);
4992
4993 bufCreateInfo.size = 1024 * 128;
4994 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4995 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004996 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004997 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004998
4999 // Test a very small allocation, smaller than the minimum node size.
5000 bufCreateInfo.size = 1;
5001 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5002 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005003 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02005004 bufInfo.push_back(newBufInfo);
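// The buddy algorithm rounds every allocation up to a power-of-two node, so even this
// 1-byte buffer is expected to occupy at least the minimum node size - internal
// fragmentation is the intended trade-off of this algorithm.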
Adam Sawickia83793a2018-09-03 13:40:42 +02005005
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005006 // Test a small allocation with an explicit alignment requirement.
5007 {
5008 VkMemoryRequirements memReq;
5009 memReq.alignment = 256;
5010 memReq.memoryTypeBits = UINT32_MAX;
5011 memReq.size = 32;
5012
5013 newBufInfo.Buffer = VK_NULL_HANDLE;
5014 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5015 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005016 TEST(res == VK_SUCCESS);
5017 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005018 bufInfo.push_back(newBufInfo);
5019 }
5020
5021 //SaveAllocatorStatsToFile(L"TEST.json");
5022
Adam Sawicki21017c62018-09-07 15:26:59 +02005023 VmaPoolStats stats = {};
5024 vmaGetPoolStats(g_hAllocator, pool, &stats);
5025 int DBG = 0; // Set breakpoint here to inspect `stats`.
5026
Adam Sawicki80927152018-09-07 17:27:23 +02005027 // Allocate enough new buffers to be sure to spill over into a second memory block.
5028 for(uint32_t i = 0; i < 32; ++i)
5029 {
5030 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5031 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5032 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005033 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005034 bufInfo.push_back(newBufInfo);
5035 }
5036
5037 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5038
Adam Sawickia83793a2018-09-03 13:40:42 +02005039 // Destroy the buffers in random order.
5040 while(!bufInfo.empty())
5041 {
5042 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5043 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5044 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5045 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5046 }
5047
5048 vmaDestroyPool(g_hAllocator, pool);
5049}
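// Illustrative helper - a hypothetical addition, not called by the tests above: prints the
// VmaPoolStats that BasicTestBuddyAllocator() only inspects in the debugger. It relies solely
// on public fields of the VmaPoolStats struct.
static void PrintPoolStats(const VmaPoolStats& stats)
{
    wprintf(L"  Pool stats: size=%llu B, unused=%llu B, allocations=%llu, "
        L"unused ranges=%llu, largest free range=%llu B\n",
        (unsigned long long)stats.size,
        (unsigned long long)stats.unusedSize,
        (unsigned long long)stats.allocationCount,
        (unsigned long long)stats.unusedRangeCount,
        (unsigned long long)stats.unusedRangeSizeMax);
}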
5050
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005051static void BasicTestAllocatePages()
5052{
5053 wprintf(L"Basic test allocate pages\n");
5054
5055 RandomNumberGenerator rand{765461};
5056
5057 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5058 sampleBufCreateInfo.size = 1024; // Size doesn't matter here - this sample buffer is only used to find a memory type index.
5059 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5060
5061 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5062 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5063
5064 VmaPoolCreateInfo poolCreateInfo = {};
5065 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005066 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005067
5068 // 1 block of 1 MB.
5069 poolCreateInfo.blockSize = 1024 * 1024;
5070 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5071
5072 // Create pool.
5073 VmaPool pool = nullptr;
5074 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005075 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005076
5077 // Make 100 allocations of 4 KB - they should fit into the pool.
5078 VkMemoryRequirements memReq;
5079 memReq.memoryTypeBits = UINT32_MAX;
5080 memReq.alignment = 4 * 1024;
5081 memReq.size = 4 * 1024;
5082
5083 VmaAllocationCreateInfo allocCreateInfo = {};
5084 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5085 allocCreateInfo.pool = pool;
5086
5087 constexpr uint32_t allocCount = 100;
5088
5089 std::vector<VmaAllocation> alloc{allocCount};
5090 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5091 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005092 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005093 for(uint32_t i = 0; i < allocCount; ++i)
5094 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005095 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005096 allocInfo[i].pMappedData != nullptr &&
5097 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5098 allocInfo[i].memoryType == allocInfo[0].memoryType);
5099 }
5100
5101 // Free the allocations.
5102 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5103 std::fill(alloc.begin(), alloc.end(), nullptr);
5104 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5105
5106 // Try to make 100 allocations of 100 KB. This call should fail because the pool doesn't have enough memory.
5107 // Also test that the allocation info array parameter is optional (pass null).
5108 memReq.size = 100 * 1024;
5109 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005110 TEST(res != VK_SUCCESS);
5111 TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
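// vmaAllocateMemoryPages() is expected to be all-or-nothing: when the call fails,
// no allocations are left behind, which the TEST above verifies.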
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005112
5113 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5114 memReq.size = 4 * 1024;
5115 memReq.alignment = 128 * 1024;
5116 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005117 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005118
5119 // Make 100 dedicated allocations of 4 KB.
5120 memReq.alignment = 4 * 1024;
5121 memReq.size = 4 * 1024;
5122
5123 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5124 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5125 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5126 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005127 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005128 for(uint32_t i = 0; i < allocCount; ++i)
5129 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005130 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005131 allocInfo[i].pMappedData != nullptr &&
5132 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5133 allocInfo[i].offset == 0);
5134 if(i > 0)
5135 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005136 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005137 }
5138 }
5139
5140 // Free the allocations.
5141 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5142 std::fill(alloc.begin(), alloc.end(), nullptr);
5143 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5144
5145 vmaDestroyPool(g_hAllocator, pool);
5146}
5147
Adam Sawickif2975342018-10-16 13:49:02 +02005148// Test the testing environment.
5149static void TestGpuData()
5150{
5151 RandomNumberGenerator rand = { 53434 };
5152
5153 std::vector<AllocInfo> allocInfo;
5154
5155 for(size_t i = 0; i < 100; ++i)
5156 {
5157 AllocInfo info = {};
5158
5159 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5160 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5161 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5162 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5163 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5164
5165 VmaAllocationCreateInfo allocCreateInfo = {};
5166 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5167
5168 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5169 TEST(res == VK_SUCCESS);
5170
5171 info.m_StartValue = rand.Generate();
5172
5173 allocInfo.push_back(std::move(info));
5174 }
5175
5176 UploadGpuData(allocInfo.data(), allocInfo.size());
5177
5178 ValidateGpuData(allocInfo.data(), allocInfo.size());
5179
5180 DestroyAllAllocations(allocInfo);
5181}
5182
Adam Sawickib8333fb2018-03-13 16:15:53 +01005183void Test()
5184{
5185 wprintf(L"TESTING:\n");
5186
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005187 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005188 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005189 ////////////////////////////////////////////////////////////////////////////////
5190 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02005191 return;
5192 }
5193
Adam Sawickib8333fb2018-03-13 16:15:53 +01005194 // # Simple tests
5195
5196 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005197 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005198#if VMA_DEBUG_MARGIN
5199 TestDebugMargin();
5200#else
5201 TestPool_SameSize();
5202 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005203 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005204#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005205#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5206 TestAllocationsInitialization();
5207#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005208 TestMapping();
5209 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005210 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005211 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005212 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005213
Adam Sawicki4338f662018-09-07 14:12:37 +02005214 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005215 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005216
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005217 {
5218 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005219 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005220 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005221 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005222 fclose(file);
5223 }
5224
Adam Sawickib8333fb2018-03-13 16:15:53 +01005225 TestDefragmentationSimple();
5226 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005227 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005228 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005229
5230 // # Detailed tests
5231 FILE* file;
5232 fopen_s(&file, "Results.csv", "w");
5233 assert(file != NULL);
5234
5235 WriteMainTestResultHeader(file);
5236 PerformMainTests(file);
5237 //PerformCustomMainTest(file);
5238
5239 WritePoolTestResultHeader(file);
5240 PerformPoolTests(file);
5241 //PerformCustomPoolTest(file);
5242
5243 fclose(file);
5244
5245 wprintf(L"Done.\n");
5246}
5247
Adam Sawickif1a793c2018-03-13 15:42:22 +01005248#endif // #ifdef _WIN32