blob: 6ad99fb191fac801ffa91ffd695f22246455d3c3 [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
2// Copyright (c) 2017-2019 Advanced Micro Devices, Inc. All rights reserved.
3//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawickif1a793c2018-03-13 15:42:22 +010029
30#ifdef _WIN32
31
Adam Sawicki33d2ce72018-08-27 13:59:13 +020032static const char* CODE_DESCRIPTION = "Foo";
33
Adam Sawickif2975342018-10-16 13:49:02 +020034extern VkCommandBuffer g_hTemporaryCommandBuffer;
35void BeginSingleTimeCommands();
36void EndSingleTimeCommands();
37
Adam Sawickibdb89a92018-12-13 11:56:30 +010038#ifndef VMA_DEBUG_MARGIN
39 #define VMA_DEBUG_MARGIN 0
40#endif
41
Adam Sawicki0a607132018-08-24 11:18:41 +020042enum CONFIG_TYPE {
43 CONFIG_TYPE_MINIMUM,
44 CONFIG_TYPE_SMALL,
45 CONFIG_TYPE_AVERAGE,
46 CONFIG_TYPE_LARGE,
47 CONFIG_TYPE_MAXIMUM,
48 CONFIG_TYPE_COUNT
49};
50
Adam Sawickif2975342018-10-16 13:49:02 +020051static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
52//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020053
Adam Sawickib8333fb2018-03-13 16:15:53 +010054enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
55
Adam Sawicki0667e332018-08-24 17:26:44 +020056static const char* FREE_ORDER_NAMES[] = {
57 "FORWARD",
58 "BACKWARD",
59 "RANDOM",
Adam Sawicki0a607132018-08-24 11:18:41 +020060};
61
Adam Sawicki80927152018-09-07 17:27:23 +020062// Copy of internal VmaAlgorithmToStr.
63static const char* AlgorithmToStr(uint32_t algorithm)
64{
65 switch(algorithm)
66 {
67 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
68 return "Linear";
69 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
70 return "Buddy";
71 case 0:
72 return "Default";
73 default:
74 assert(0);
75 return "";
76 }
77}
78
Adam Sawickib8333fb2018-03-13 16:15:53 +010079struct AllocationSize
80{
81 uint32_t Probability;
82 VkDeviceSize BufferSizeMin, BufferSizeMax;
83 uint32_t ImageSizeMin, ImageSizeMax;
84};
85
86struct Config
87{
88 uint32_t RandSeed;
89 VkDeviceSize BeginBytesToAllocate;
90 uint32_t AdditionalOperationCount;
91 VkDeviceSize MaxBytesToAllocate;
92 uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
93 std::vector<AllocationSize> AllocationSizes;
94 uint32_t ThreadCount;
95 uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
96 FREE_ORDER FreeOrder;
Adam Sawicki0667e332018-08-24 17:26:44 +020097 VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
Adam Sawickib8333fb2018-03-13 16:15:53 +010098};
99
100struct Result
101{
102 duration TotalTime;
103 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
104 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
105 VkDeviceSize TotalMemoryAllocated;
106 VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
107};
108
109void TestDefragmentationSimple();
110void TestDefragmentationFull();
111
112struct PoolTestConfig
113{
114 uint32_t RandSeed;
115 uint32_t ThreadCount;
116 VkDeviceSize PoolSize;
117 uint32_t FrameCount;
118 uint32_t TotalItemCount;
119 // Range for number of items used in each frame.
120 uint32_t UsedItemCountMin, UsedItemCountMax;
121 // Percent of items to make unused, and possibly make some others used in each frame.
122 uint32_t ItemsToMakeUnusedPercent;
123 std::vector<AllocationSize> AllocationSizes;
124
125 VkDeviceSize CalcAvgResourceSize() const
126 {
127 uint32_t probabilitySum = 0;
128 VkDeviceSize sizeSum = 0;
129 for(size_t i = 0; i < AllocationSizes.size(); ++i)
130 {
131 const AllocationSize& allocSize = AllocationSizes[i];
132 if(allocSize.BufferSizeMax > 0)
133 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
134 else
135 {
136 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
137 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
138 }
139 probabilitySum += allocSize.Probability;
140 }
141 return sizeSum / probabilitySum;
142 }
143
144 bool UsesBuffers() const
145 {
146 for(size_t i = 0; i < AllocationSizes.size(); ++i)
147 if(AllocationSizes[i].BufferSizeMax > 0)
148 return true;
149 return false;
150 }
151
152 bool UsesImages() const
153 {
154 for(size_t i = 0; i < AllocationSizes.size(); ++i)
155 if(AllocationSizes[i].ImageSizeMax > 0)
156 return true;
157 return false;
158 }
159};
160
161struct PoolTestResult
162{
163 duration TotalTime;
164 duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
165 duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
166 size_t LostAllocationCount, LostAllocationTotalSize;
167 size_t FailedAllocationCount, FailedAllocationTotalSize;
168};
169
170static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;
171
Adam Sawicki51fa9662018-10-03 13:44:29 +0200172uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200173
Adam Sawickib8333fb2018-03-13 16:15:53 +0100174struct BufferInfo
175{
176 VkBuffer Buffer = VK_NULL_HANDLE;
177 VmaAllocation Allocation = VK_NULL_HANDLE;
178};
179
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200180static uint32_t GetAllocationStrategyCount()
181{
182 uint32_t strategyCount = 0;
183 switch(ConfigType)
184 {
185 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
186 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
187 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
188 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
189 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
190 default: assert(0);
191 }
192 return strategyCount;
193}
194
195static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
196{
197 switch(allocStrategy)
198 {
199 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
200 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
201 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
202 case 0: return "Default"; break;
203 default: assert(0); return "";
204 }
205}
206
Adam Sawickib8333fb2018-03-13 16:15:53 +0100207static void InitResult(Result& outResult)
208{
209 outResult.TotalTime = duration::zero();
210 outResult.AllocationTimeMin = duration::max();
211 outResult.AllocationTimeAvg = duration::zero();
212 outResult.AllocationTimeMax = duration::min();
213 outResult.DeallocationTimeMin = duration::max();
214 outResult.DeallocationTimeAvg = duration::zero();
215 outResult.DeallocationTimeMax = duration::min();
216 outResult.TotalMemoryAllocated = 0;
217 outResult.FreeRangeSizeAvg = 0;
218 outResult.FreeRangeSizeMax = 0;
219}
220
221class TimeRegisterObj
222{
223public:
224 TimeRegisterObj(duration& min, duration& sum, duration& max) :
225 m_Min(min),
226 m_Sum(sum),
227 m_Max(max),
228 m_TimeBeg(std::chrono::high_resolution_clock::now())
229 {
230 }
231
232 ~TimeRegisterObj()
233 {
234 duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
235 m_Sum += d;
236 if(d < m_Min) m_Min = d;
237 if(d > m_Max) m_Max = d;
238 }
239
240private:
241 duration& m_Min;
242 duration& m_Sum;
243 duration& m_Max;
244 time_point m_TimeBeg;
245};
246
247struct PoolTestThreadResult
248{
249 duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
250 duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
251 size_t AllocationCount, DeallocationCount;
252 size_t LostAllocationCount, LostAllocationTotalSize;
253 size_t FailedAllocationCount, FailedAllocationTotalSize;
254};
255
256class AllocationTimeRegisterObj : public TimeRegisterObj
257{
258public:
259 AllocationTimeRegisterObj(Result& result) :
260 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
261 {
262 }
263};
264
265class DeallocationTimeRegisterObj : public TimeRegisterObj
266{
267public:
268 DeallocationTimeRegisterObj(Result& result) :
269 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
270 {
271 }
272};
273
274class PoolAllocationTimeRegisterObj : public TimeRegisterObj
275{
276public:
277 PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
278 TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
279 {
280 }
281};
282
283class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
284{
285public:
286 PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
287 TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
288 {
289 }
290};
291
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200292static void CurrentTimeToStr(std::string& out)
293{
294 time_t rawTime; time(&rawTime);
295 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
296 char timeStr[128];
297 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
298 out = timeStr;
299}
300
Adam Sawickib8333fb2018-03-13 16:15:53 +0100301VkResult MainTest(Result& outResult, const Config& config)
302{
303 assert(config.ThreadCount > 0);
304
305 InitResult(outResult);
306
307 RandomNumberGenerator mainRand{config.RandSeed};
308
309 time_point timeBeg = std::chrono::high_resolution_clock::now();
310
311 std::atomic<size_t> allocationCount = 0;
312 VkResult res = VK_SUCCESS;
313
314 uint32_t memUsageProbabilitySum =
315 config.MemUsageProbability[0] + config.MemUsageProbability[1] +
316 config.MemUsageProbability[2] + config.MemUsageProbability[3];
317 assert(memUsageProbabilitySum > 0);
318
319 uint32_t allocationSizeProbabilitySum = std::accumulate(
320 config.AllocationSizes.begin(),
321 config.AllocationSizes.end(),
322 0u,
323 [](uint32_t sum, const AllocationSize& allocSize) {
324 return sum + allocSize.Probability;
325 });
326
327 struct Allocation
328 {
329 VkBuffer Buffer;
330 VkImage Image;
331 VmaAllocation Alloc;
332 };
333
334 std::vector<Allocation> commonAllocations;
335 std::mutex commonAllocationsMutex;
336
337 auto Allocate = [&](
338 VkDeviceSize bufferSize,
339 const VkExtent2D imageExtent,
340 RandomNumberGenerator& localRand,
341 VkDeviceSize& totalAllocatedBytes,
342 std::vector<Allocation>& allocations) -> VkResult
343 {
344 assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));
345
346 uint32_t memUsageIndex = 0;
347 uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
348 while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
349 memUsageRand -= config.MemUsageProbability[memUsageIndex++];
350
351 VmaAllocationCreateInfo memReq = {};
352 memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
Adam Sawicki0667e332018-08-24 17:26:44 +0200353 memReq.flags |= config.AllocationStrategy;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100354
355 Allocation allocation = {};
356 VmaAllocationInfo allocationInfo;
357
358 // Buffer
359 if(bufferSize > 0)
360 {
361 assert(imageExtent.width == 0);
362 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
363 bufferInfo.size = bufferSize;
364 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
365
366 {
367 AllocationTimeRegisterObj timeRegisterObj{outResult};
368 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
369 }
370 }
371 // Image
372 else
373 {
374 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
375 imageInfo.imageType = VK_IMAGE_TYPE_2D;
376 imageInfo.extent.width = imageExtent.width;
377 imageInfo.extent.height = imageExtent.height;
378 imageInfo.extent.depth = 1;
379 imageInfo.mipLevels = 1;
380 imageInfo.arrayLayers = 1;
381 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
382 imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
383 VK_IMAGE_TILING_OPTIMAL :
384 VK_IMAGE_TILING_LINEAR;
385 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
386 switch(memReq.usage)
387 {
388 case VMA_MEMORY_USAGE_GPU_ONLY:
389 switch(localRand.Generate() % 3)
390 {
391 case 0:
392 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
393 break;
394 case 1:
395 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
396 break;
397 case 2:
398 imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
399 break;
400 }
401 break;
402 case VMA_MEMORY_USAGE_CPU_ONLY:
403 case VMA_MEMORY_USAGE_CPU_TO_GPU:
404 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
405 break;
406 case VMA_MEMORY_USAGE_GPU_TO_CPU:
407 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
408 break;
409 }
410 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
411 imageInfo.flags = 0;
412
413 {
414 AllocationTimeRegisterObj timeRegisterObj{outResult};
415 res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
416 }
417 }
418
419 if(res == VK_SUCCESS)
420 {
421 ++allocationCount;
422 totalAllocatedBytes += allocationInfo.size;
423 bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
424 if(useCommonAllocations)
425 {
426 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
427 commonAllocations.push_back(allocation);
428 }
429 else
430 allocations.push_back(allocation);
431 }
432 else
433 {
Adam Sawickib8d34d52018-10-03 17:41:20 +0200434 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100435 }
436 return res;
437 };
438
439 auto GetNextAllocationSize = [&](
440 VkDeviceSize& outBufSize,
441 VkExtent2D& outImageSize,
442 RandomNumberGenerator& localRand)
443 {
444 outBufSize = 0;
445 outImageSize = {0, 0};
446
447 uint32_t allocSizeIndex = 0;
448 uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
449 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
450 r -= config.AllocationSizes[allocSizeIndex++].Probability;
451
452 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
453 if(allocSize.BufferSizeMax > 0)
454 {
455 assert(allocSize.ImageSizeMax == 0);
456 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
457 outBufSize = allocSize.BufferSizeMin;
458 else
459 {
460 outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
461 outBufSize = outBufSize / 16 * 16;
462 }
463 }
464 else
465 {
466 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
467 outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
468 else
469 {
470 outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
471 outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
472 }
473 }
474 };
475
476 std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
477 HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
478
479 auto ThreadProc = [&](uint32_t randSeed) -> void
480 {
481 RandomNumberGenerator threadRand(randSeed);
482 VkDeviceSize threadTotalAllocatedBytes = 0;
483 std::vector<Allocation> threadAllocations;
484 VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
485 VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
486 uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;
487
488 // BEGIN ALLOCATIONS
489 for(;;)
490 {
491 VkDeviceSize bufferSize = 0;
492 VkExtent2D imageExtent = {};
493 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
494 if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
495 threadBeginBytesToAllocate)
496 {
497 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
498 break;
499 }
500 else
501 break;
502 }
503
504 // ADDITIONAL ALLOCATIONS AND FREES
505 for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
506 {
507 VkDeviceSize bufferSize = 0;
508 VkExtent2D imageExtent = {};
509 GetNextAllocationSize(bufferSize, imageExtent, threadRand);
510
511 // true = allocate, false = free
512 bool allocate = threadRand.Generate() % 2 != 0;
513
514 if(allocate)
515 {
516 if(threadTotalAllocatedBytes +
517 bufferSize +
518 imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
519 threadMaxBytesToAllocate)
520 {
521 if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
522 break;
523 }
524 }
525 else
526 {
527 bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
528 if(useCommonAllocations)
529 {
530 std::unique_lock<std::mutex> lock(commonAllocationsMutex);
531 if(!commonAllocations.empty())
532 {
533 size_t indexToFree = threadRand.Generate() % commonAllocations.size();
534 VmaAllocationInfo allocationInfo;
535 vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
536 if(threadTotalAllocatedBytes >= allocationInfo.size)
537 {
538 DeallocationTimeRegisterObj timeRegisterObj{outResult};
539 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
540 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
541 else
542 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
543 threadTotalAllocatedBytes -= allocationInfo.size;
544 commonAllocations.erase(commonAllocations.begin() + indexToFree);
545 }
546 }
547 }
548 else
549 {
550 if(!threadAllocations.empty())
551 {
552 size_t indexToFree = threadRand.Generate() % threadAllocations.size();
553 VmaAllocationInfo allocationInfo;
554 vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
555 if(threadTotalAllocatedBytes >= allocationInfo.size)
556 {
557 DeallocationTimeRegisterObj timeRegisterObj{outResult};
558 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
559 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
560 else
561 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
562 threadTotalAllocatedBytes -= allocationInfo.size;
563 threadAllocations.erase(threadAllocations.begin() + indexToFree);
564 }
565 }
566 }
567 }
568 }
569
570 ++numThreadsReachedMaxAllocations;
571
572 WaitForSingleObject(threadsFinishEvent, INFINITE);
573
574 // DEALLOCATION
575 while(!threadAllocations.empty())
576 {
577 size_t indexToFree = 0;
578 switch(config.FreeOrder)
579 {
580 case FREE_ORDER::FORWARD:
581 indexToFree = 0;
582 break;
583 case FREE_ORDER::BACKWARD:
584 indexToFree = threadAllocations.size() - 1;
585 break;
586 case FREE_ORDER::RANDOM:
587 indexToFree = mainRand.Generate() % threadAllocations.size();
588 break;
589 }
590
591 {
592 DeallocationTimeRegisterObj timeRegisterObj{outResult};
593 if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
594 vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
595 else
596 vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
597 }
598 threadAllocations.erase(threadAllocations.begin() + indexToFree);
599 }
600 };
601
602 uint32_t threadRandSeed = mainRand.Generate();
603 std::vector<std::thread> bkgThreads;
604 for(size_t i = 0; i < config.ThreadCount; ++i)
605 {
606 bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
607 }
608
609 // Wait for threads reached max allocations
610 while(numThreadsReachedMaxAllocations < config.ThreadCount)
611 Sleep(0);
612
613 // CALCULATE MEMORY STATISTICS ON FINAL USAGE
614 VmaStats vmaStats = {};
615 vmaCalculateStats(g_hAllocator, &vmaStats);
616 outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
617 outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
618 outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;
619
620 // Signal threads to deallocate
621 SetEvent(threadsFinishEvent);
622
623 // Wait for threads finished
624 for(size_t i = 0; i < bkgThreads.size(); ++i)
625 bkgThreads[i].join();
626 bkgThreads.clear();
627
628 CloseHandle(threadsFinishEvent);
629
630 // Deallocate remaining common resources
631 while(!commonAllocations.empty())
632 {
633 size_t indexToFree = 0;
634 switch(config.FreeOrder)
635 {
636 case FREE_ORDER::FORWARD:
637 indexToFree = 0;
638 break;
639 case FREE_ORDER::BACKWARD:
640 indexToFree = commonAllocations.size() - 1;
641 break;
642 case FREE_ORDER::RANDOM:
643 indexToFree = mainRand.Generate() % commonAllocations.size();
644 break;
645 }
646
647 {
648 DeallocationTimeRegisterObj timeRegisterObj{outResult};
649 if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
650 vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
651 else
652 vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
653 }
654 commonAllocations.erase(commonAllocations.begin() + indexToFree);
655 }
656
657 if(allocationCount)
658 {
659 outResult.AllocationTimeAvg /= allocationCount;
660 outResult.DeallocationTimeAvg /= allocationCount;
661 }
662
663 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
664
665 return res;
666}
667
Adam Sawicki51fa9662018-10-03 13:44:29 +0200668void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100669{
670 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200671 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100672 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200673 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100674}
675
676struct AllocInfo
677{
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200678 VmaAllocation m_Allocation = VK_NULL_HANDLE;
679 VkBuffer m_Buffer = VK_NULL_HANDLE;
680 VkImage m_Image = VK_NULL_HANDLE;
681 uint32_t m_StartValue = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +0100682 union
683 {
684 VkBufferCreateInfo m_BufferInfo;
685 VkImageCreateInfo m_ImageInfo;
686 };
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200687
688 void CreateBuffer(
689 const VkBufferCreateInfo& bufCreateInfo,
690 const VmaAllocationCreateInfo& allocCreateInfo);
691 void Destroy();
Adam Sawickib8333fb2018-03-13 16:15:53 +0100692};
693
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200694void AllocInfo::CreateBuffer(
695 const VkBufferCreateInfo& bufCreateInfo,
696 const VmaAllocationCreateInfo& allocCreateInfo)
697{
698 m_BufferInfo = bufCreateInfo;
699 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
700 TEST(res == VK_SUCCESS);
701}
702
703void AllocInfo::Destroy()
704{
705 if(m_Image)
706 {
707 vkDestroyImage(g_hDevice, m_Image, nullptr);
708 }
709 if(m_Buffer)
710 {
711 vkDestroyBuffer(g_hDevice, m_Buffer, nullptr);
712 }
713 if(m_Allocation)
714 {
715 vmaFreeMemory(g_hAllocator, m_Allocation);
716 }
717}
718
Adam Sawickif2975342018-10-16 13:49:02 +0200719class StagingBufferCollection
720{
721public:
722 StagingBufferCollection() { }
723 ~StagingBufferCollection();
724 // Returns false if maximum total size of buffers would be exceeded.
725 bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
726 void ReleaseAllBuffers();
727
728private:
729 static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
730 struct BufInfo
731 {
732 VmaAllocation Allocation = VK_NULL_HANDLE;
733 VkBuffer Buffer = VK_NULL_HANDLE;
734 VkDeviceSize Size = VK_WHOLE_SIZE;
735 void* MappedPtr = nullptr;
736 bool Used = false;
737 };
738 std::vector<BufInfo> m_Bufs;
739 // Including both used and unused.
740 VkDeviceSize m_TotalSize = 0;
741};
742
743StagingBufferCollection::~StagingBufferCollection()
744{
745 for(size_t i = m_Bufs.size(); i--; )
746 {
747 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
748 }
749}
750
751bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
752{
753 assert(size <= MAX_TOTAL_SIZE);
754
755 // Try to find existing unused buffer with best size.
756 size_t bestIndex = SIZE_MAX;
757 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
758 {
759 BufInfo& currBufInfo = m_Bufs[i];
760 if(!currBufInfo.Used && currBufInfo.Size >= size &&
761 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
762 {
763 bestIndex = i;
764 }
765 }
766
767 if(bestIndex != SIZE_MAX)
768 {
769 m_Bufs[bestIndex].Used = true;
770 outBuffer = m_Bufs[bestIndex].Buffer;
771 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
772 return true;
773 }
774
775 // Allocate new buffer with requested size.
776 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
777 {
778 BufInfo bufInfo;
779 bufInfo.Size = size;
780 bufInfo.Used = true;
781
782 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
783 bufCreateInfo.size = size;
784 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
785
786 VmaAllocationCreateInfo allocCreateInfo = {};
787 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
788 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
789
790 VmaAllocationInfo allocInfo;
791 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
792 bufInfo.MappedPtr = allocInfo.pMappedData;
793 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
794
795 outBuffer = bufInfo.Buffer;
796 outMappedPtr = bufInfo.MappedPtr;
797
798 m_Bufs.push_back(std::move(bufInfo));
799
800 m_TotalSize += size;
801
802 return true;
803 }
804
805 // There are some unused but smaller buffers: Free them and try again.
806 bool hasUnused = false;
807 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
808 {
809 if(!m_Bufs[i].Used)
810 {
811 hasUnused = true;
812 break;
813 }
814 }
815 if(hasUnused)
816 {
817 for(size_t i = m_Bufs.size(); i--; )
818 {
819 if(!m_Bufs[i].Used)
820 {
821 m_TotalSize -= m_Bufs[i].Size;
822 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
823 m_Bufs.erase(m_Bufs.begin() + i);
824 }
825 }
826
827 return AcquireBuffer(size, outBuffer, outMappedPtr);
828 }
829
830 return false;
831}
832
833void StagingBufferCollection::ReleaseAllBuffers()
834{
835 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
836 {
837 m_Bufs[i].Used = false;
838 }
839}
840
841static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
842{
843 StagingBufferCollection stagingBufs;
844
845 bool cmdBufferStarted = false;
846 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
847 {
848 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
849 if(currAllocInfo.m_Buffer)
850 {
851 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
852
853 VkBuffer stagingBuf = VK_NULL_HANDLE;
854 void* stagingBufMappedPtr = nullptr;
855 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
856 {
857 TEST(cmdBufferStarted);
858 EndSingleTimeCommands();
859 stagingBufs.ReleaseAllBuffers();
860 cmdBufferStarted = false;
861
862 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
863 TEST(ok);
864 }
865
866 // Fill staging buffer.
867 {
868 assert(size % sizeof(uint32_t) == 0);
869 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
870 uint32_t val = currAllocInfo.m_StartValue;
871 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
872 {
873 *stagingValPtr = val;
874 ++stagingValPtr;
875 ++val;
876 }
877 }
878
879 // Issue copy command from staging buffer to destination buffer.
880 if(!cmdBufferStarted)
881 {
882 cmdBufferStarted = true;
883 BeginSingleTimeCommands();
884 }
885
886 VkBufferCopy copy = {};
887 copy.srcOffset = 0;
888 copy.dstOffset = 0;
889 copy.size = size;
890 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
891 }
892 else
893 {
894 TEST(0 && "Images not currently supported.");
895 }
896 }
897
898 if(cmdBufferStarted)
899 {
900 EndSingleTimeCommands();
901 stagingBufs.ReleaseAllBuffers();
902 }
903}
904
905static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
906{
907 StagingBufferCollection stagingBufs;
908
909 bool cmdBufferStarted = false;
910 size_t validateAllocIndexOffset = 0;
911 std::vector<void*> validateStagingBuffers;
912 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
913 {
914 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
915 if(currAllocInfo.m_Buffer)
916 {
917 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
918
919 VkBuffer stagingBuf = VK_NULL_HANDLE;
920 void* stagingBufMappedPtr = nullptr;
921 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
922 {
923 TEST(cmdBufferStarted);
924 EndSingleTimeCommands();
925 cmdBufferStarted = false;
926
927 for(size_t validateIndex = 0;
928 validateIndex < validateStagingBuffers.size();
929 ++validateIndex)
930 {
931 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
932 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
933 TEST(validateSize % sizeof(uint32_t) == 0);
934 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
935 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
936 bool valid = true;
937 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
938 {
939 if(*stagingValPtr != val)
940 {
941 valid = false;
942 break;
943 }
944 ++stagingValPtr;
945 ++val;
946 }
947 TEST(valid);
948 }
949
950 stagingBufs.ReleaseAllBuffers();
951
952 validateAllocIndexOffset = allocInfoIndex;
953 validateStagingBuffers.clear();
954
955 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
956 TEST(ok);
957 }
958
959 // Issue copy command from staging buffer to destination buffer.
960 if(!cmdBufferStarted)
961 {
962 cmdBufferStarted = true;
963 BeginSingleTimeCommands();
964 }
965
966 VkBufferCopy copy = {};
967 copy.srcOffset = 0;
968 copy.dstOffset = 0;
969 copy.size = size;
970 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
971
972 // Sava mapped pointer for later validation.
973 validateStagingBuffers.push_back(stagingBufMappedPtr);
974 }
975 else
976 {
977 TEST(0 && "Images not currently supported.");
978 }
979 }
980
981 if(cmdBufferStarted)
982 {
983 EndSingleTimeCommands();
984
985 for(size_t validateIndex = 0;
986 validateIndex < validateStagingBuffers.size();
987 ++validateIndex)
988 {
989 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
990 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
991 TEST(validateSize % sizeof(uint32_t) == 0);
992 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
993 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
994 bool valid = true;
995 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
996 {
997 if(*stagingValPtr != val)
998 {
999 valid = false;
1000 break;
1001 }
1002 ++stagingValPtr;
1003 ++val;
1004 }
1005 TEST(valid);
1006 }
1007
1008 stagingBufs.ReleaseAllBuffers();
1009 }
1010}
1011
Adam Sawickib8333fb2018-03-13 16:15:53 +01001012static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1013{
1014 outMemReq = {};
1015 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1016 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1017}
1018
1019static void CreateBuffer(
1020 VmaPool pool,
1021 const VkBufferCreateInfo& bufCreateInfo,
1022 bool persistentlyMapped,
1023 AllocInfo& outAllocInfo)
1024{
1025 outAllocInfo = {};
1026 outAllocInfo.m_BufferInfo = bufCreateInfo;
1027
1028 VmaAllocationCreateInfo allocCreateInfo = {};
1029 allocCreateInfo.pool = pool;
1030 if(persistentlyMapped)
1031 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1032
1033 VmaAllocationInfo vmaAllocInfo = {};
1034 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1035
1036 // Setup StartValue and fill.
1037 {
1038 outAllocInfo.m_StartValue = (uint32_t)rand();
1039 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001040 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001041 if(!persistentlyMapped)
1042 {
1043 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1044 }
1045
1046 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001047 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001048 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1049 data[i] = value++;
1050
1051 if(!persistentlyMapped)
1052 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1053 }
1054}
1055
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001056static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001057{
1058 outAllocation.m_Allocation = nullptr;
1059 outAllocation.m_Buffer = nullptr;
1060 outAllocation.m_Image = nullptr;
1061 outAllocation.m_StartValue = (uint32_t)rand();
1062
1063 VmaAllocationCreateInfo vmaMemReq;
1064 GetMemReq(vmaMemReq);
1065
1066 VmaAllocationInfo allocInfo;
1067
1068 const bool isBuffer = true;//(rand() & 0x1) != 0;
1069 const bool isLarge = (rand() % 16) == 0;
1070 if(isBuffer)
1071 {
1072 const uint32_t bufferSize = isLarge ?
1073 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1074 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1075
1076 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1077 bufferInfo.size = bufferSize;
1078 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1079
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001080 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001081 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001082 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001083 }
1084 else
1085 {
1086 const uint32_t imageSizeX = isLarge ?
1087 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1088 rand() % 1024 + 1; // 1 ... 1024
1089 const uint32_t imageSizeY = isLarge ?
1090 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1091 rand() % 1024 + 1; // 1 ... 1024
1092
1093 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1094 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1095 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1096 imageInfo.extent.width = imageSizeX;
1097 imageInfo.extent.height = imageSizeY;
1098 imageInfo.extent.depth = 1;
1099 imageInfo.mipLevels = 1;
1100 imageInfo.arrayLayers = 1;
1101 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1102 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1103 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1104 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1105
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001106 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001107 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001108 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001109 }
1110
1111 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1112 if(allocInfo.pMappedData == nullptr)
1113 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001114 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001115 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001116 }
1117
1118 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001119 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001120 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1121 data[i] = value++;
1122
1123 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001124 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001125}
1126
1127static void DestroyAllocation(const AllocInfo& allocation)
1128{
1129 if(allocation.m_Buffer)
1130 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1131 else
1132 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1133}
1134
1135static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1136{
1137 for(size_t i = allocations.size(); i--; )
1138 DestroyAllocation(allocations[i]);
1139 allocations.clear();
1140}
1141
1142static void ValidateAllocationData(const AllocInfo& allocation)
1143{
1144 VmaAllocationInfo allocInfo;
1145 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1146
1147 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1148 if(allocInfo.pMappedData == nullptr)
1149 {
1150 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001151 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001152 }
1153
1154 uint32_t value = allocation.m_StartValue;
1155 bool ok = true;
1156 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001157 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001158 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1159 {
1160 if(data[i] != value++)
1161 {
1162 ok = false;
1163 break;
1164 }
1165 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001166 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001167
1168 if(allocInfo.pMappedData == nullptr)
1169 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1170}
1171
1172static void RecreateAllocationResource(AllocInfo& allocation)
1173{
1174 VmaAllocationInfo allocInfo;
1175 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1176
1177 if(allocation.m_Buffer)
1178 {
1179 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, nullptr);
1180
1181 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, nullptr, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001182 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001183
1184 // Just to silence validation layer warnings.
1185 VkMemoryRequirements vkMemReq;
1186 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001187 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001188
1189 res = vkBindBufferMemory(g_hDevice, allocation.m_Buffer, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001190 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001191 }
1192 else
1193 {
1194 vkDestroyImage(g_hDevice, allocation.m_Image, nullptr);
1195
1196 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, nullptr, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001197 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001198
1199 // Just to silence validation layer warnings.
1200 VkMemoryRequirements vkMemReq;
1201 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1202
1203 res = vkBindImageMemory(g_hDevice, allocation.m_Image, allocInfo.deviceMemory, allocInfo.offset);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001204 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001205 }
1206}
1207
1208static void Defragment(AllocInfo* allocs, size_t allocCount,
1209 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1210 VmaDefragmentationStats* defragmentationStats = nullptr)
1211{
1212 std::vector<VmaAllocation> vmaAllocs(allocCount);
1213 for(size_t i = 0; i < allocCount; ++i)
1214 vmaAllocs[i] = allocs[i].m_Allocation;
1215
1216 std::vector<VkBool32> allocChanged(allocCount);
1217
1218 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1219 defragmentationInfo, defragmentationStats) );
1220
1221 for(size_t i = 0; i < allocCount; ++i)
1222 {
1223 if(allocChanged[i])
1224 {
1225 RecreateAllocationResource(allocs[i]);
1226 }
1227 }
1228}
1229
1230static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1231{
1232 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1233 ValidateAllocationData(allocInfo);
1234 });
1235}
1236
1237void TestDefragmentationSimple()
1238{
1239 wprintf(L"Test defragmentation simple\n");
1240
1241 RandomNumberGenerator rand(667);
1242
1243 const VkDeviceSize BUF_SIZE = 0x10000;
1244 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1245
1246 const VkDeviceSize MIN_BUF_SIZE = 32;
1247 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1248 auto RandomBufSize = [&]() -> VkDeviceSize {
1249 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1250 };
1251
1252 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1253 bufCreateInfo.size = BUF_SIZE;
1254 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1255
1256 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1257 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1258
1259 uint32_t memTypeIndex = UINT32_MAX;
1260 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1261
1262 VmaPoolCreateInfo poolCreateInfo = {};
1263 poolCreateInfo.blockSize = BLOCK_SIZE;
1264 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1265
1266 VmaPool pool;
1267 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1268
Adam Sawickie1681912018-11-23 17:50:12 +01001269 // Defragmentation of empty pool.
1270 {
1271 VmaDefragmentationInfo2 defragInfo = {};
1272 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1273 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1274 defragInfo.poolCount = 1;
1275 defragInfo.pPools = &pool;
1276
1277 VmaDefragmentationStats defragStats = {};
1278 VmaDefragmentationContext defragCtx = nullptr;
1279 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1280 TEST(res >= VK_SUCCESS);
1281 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1282 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1283 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1284 }
1285
Adam Sawickib8333fb2018-03-13 16:15:53 +01001286 std::vector<AllocInfo> allocations;
1287
1288 // persistentlyMappedOption = 0 - not persistently mapped.
1289 // persistentlyMappedOption = 1 - persistently mapped.
1290 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1291 {
1292 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1293 const bool persistentlyMapped = persistentlyMappedOption != 0;
1294
1295 // # Test 1
1296 // Buffers of fixed size.
1297 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1298 // Expected result: at least 1 block freed.
1299 {
1300 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1301 {
1302 AllocInfo allocInfo;
1303 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1304 allocations.push_back(allocInfo);
1305 }
1306
1307 for(size_t i = 1; i < allocations.size(); ++i)
1308 {
1309 DestroyAllocation(allocations[i]);
1310 allocations.erase(allocations.begin() + i);
1311 }
1312
1313 VmaDefragmentationStats defragStats;
1314 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001315 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1316 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001317
1318 ValidateAllocationsData(allocations.data(), allocations.size());
1319
1320 DestroyAllAllocations(allocations);
1321 }
1322
1323 // # Test 2
1324 // Buffers of fixed size.
1325 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1326 // Expected result: Each of 4 interations makes some progress.
1327 {
1328 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1329 {
1330 AllocInfo allocInfo;
1331 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1332 allocations.push_back(allocInfo);
1333 }
1334
1335 for(size_t i = 1; i < allocations.size(); ++i)
1336 {
1337 DestroyAllocation(allocations[i]);
1338 allocations.erase(allocations.begin() + i);
1339 }
1340
1341 VmaDefragmentationInfo defragInfo = {};
1342 defragInfo.maxAllocationsToMove = 1;
1343 defragInfo.maxBytesToMove = BUF_SIZE;
1344
1345 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1346 {
1347 VmaDefragmentationStats defragStats;
1348 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001349 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001350 }
1351
1352 ValidateAllocationsData(allocations.data(), allocations.size());
1353
1354 DestroyAllAllocations(allocations);
1355 }
1356
1357 // # Test 3
1358 // Buffers of variable size.
1359 // Create a number of buffers. Remove some percent of them.
1360 // Defragment while having some percent of them unmovable.
1361 // Expected result: Just simple validation.
1362 {
1363 for(size_t i = 0; i < 100; ++i)
1364 {
1365 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1366 localBufCreateInfo.size = RandomBufSize();
1367
1368 AllocInfo allocInfo;
1369 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1370 allocations.push_back(allocInfo);
1371 }
1372
1373 const uint32_t percentToDelete = 60;
1374 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1375 for(size_t i = 0; i < numberToDelete; ++i)
1376 {
1377 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1378 DestroyAllocation(allocations[indexToDelete]);
1379 allocations.erase(allocations.begin() + indexToDelete);
1380 }
1381
1382 // Non-movable allocations will be at the beginning of allocations array.
1383 const uint32_t percentNonMovable = 20;
1384 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1385 for(size_t i = 0; i < numberNonMovable; ++i)
1386 {
1387 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1388 if(indexNonMovable != i)
1389 std::swap(allocations[i], allocations[indexNonMovable]);
1390 }
1391
1392 VmaDefragmentationStats defragStats;
1393 Defragment(
1394 allocations.data() + numberNonMovable,
1395 allocations.size() - numberNonMovable,
1396 nullptr, &defragStats);
1397
1398 ValidateAllocationsData(allocations.data(), allocations.size());
1399
1400 DestroyAllAllocations(allocations);
1401 }
1402 }
1403
Adam Sawicki647cf242018-11-23 17:58:00 +01001404 /*
1405 Allocation that must be move to an overlapping place using memmove().
1406 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1407 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001408 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001409 {
1410 AllocInfo allocInfo[2];
1411
1412 bufCreateInfo.size = BUF_SIZE;
1413 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1414 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1415 bufCreateInfo.size = biggerBufSize;
1416 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1417
1418 DestroyAllocation(allocInfo[0]);
1419
1420 VmaDefragmentationStats defragStats;
1421 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1422 // If this fails, it means we couldn't do memmove with overlapping regions.
1423 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1424
1425 ValidateAllocationsData(&allocInfo[1], 1);
1426 DestroyAllocation(allocInfo[1]);
1427 }
1428
Adam Sawickib8333fb2018-03-13 16:15:53 +01001429 vmaDestroyPool(g_hAllocator, pool);
1430}
1431
Adam Sawicki52076eb2018-11-22 16:14:50 +01001432void TestDefragmentationWholePool()
1433{
1434 wprintf(L"Test defragmentation whole pool\n");
1435
1436 RandomNumberGenerator rand(668);
1437
1438 const VkDeviceSize BUF_SIZE = 0x10000;
1439 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1440
1441 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1442 bufCreateInfo.size = BUF_SIZE;
1443 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1444
1445 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1446 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1447
1448 uint32_t memTypeIndex = UINT32_MAX;
1449 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1450
1451 VmaPoolCreateInfo poolCreateInfo = {};
1452 poolCreateInfo.blockSize = BLOCK_SIZE;
1453 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1454
1455 VmaDefragmentationStats defragStats[2];
1456 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1457 {
1458 VmaPool pool;
1459 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1460
1461 std::vector<AllocInfo> allocations;
1462
1463 // Buffers of fixed size.
1464 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1465 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1466 {
1467 AllocInfo allocInfo;
1468 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1469 allocations.push_back(allocInfo);
1470 }
1471
1472 for(size_t i = 1; i < allocations.size(); ++i)
1473 {
1474 DestroyAllocation(allocations[i]);
1475 allocations.erase(allocations.begin() + i);
1476 }
1477
1478 VmaDefragmentationInfo2 defragInfo = {};
1479 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1480 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1481 std::vector<VmaAllocation> allocationsToDefrag;
1482 if(caseIndex == 0)
1483 {
1484 defragInfo.poolCount = 1;
1485 defragInfo.pPools = &pool;
1486 }
1487 else
1488 {
1489 const size_t allocCount = allocations.size();
1490 allocationsToDefrag.resize(allocCount);
1491 std::transform(
1492 allocations.begin(), allocations.end(),
1493 allocationsToDefrag.begin(),
1494 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1495 defragInfo.allocationCount = (uint32_t)allocCount;
1496 defragInfo.pAllocations = allocationsToDefrag.data();
1497 }
1498
1499 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1500 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1501 TEST(res >= VK_SUCCESS);
1502 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1503
1504 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1505
1506 ValidateAllocationsData(allocations.data(), allocations.size());
1507
1508 DestroyAllAllocations(allocations);
1509
1510 vmaDestroyPool(g_hAllocator, pool);
1511 }
1512
1513 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1514 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1515 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1516 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1517}
1518
Adam Sawickib8333fb2018-03-13 16:15:53 +01001519void TestDefragmentationFull()
1520{
1521 std::vector<AllocInfo> allocations;
1522
1523 // Create initial allocations.
1524 for(size_t i = 0; i < 400; ++i)
1525 {
1526 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001527 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001528 allocations.push_back(allocation);
1529 }
1530
1531 // Delete random allocations
1532 const size_t allocationsToDeletePercent = 80;
1533 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1534 for(size_t i = 0; i < allocationsToDelete; ++i)
1535 {
1536 size_t index = (size_t)rand() % allocations.size();
1537 DestroyAllocation(allocations[index]);
1538 allocations.erase(allocations.begin() + index);
1539 }
1540
1541 for(size_t i = 0; i < allocations.size(); ++i)
1542 ValidateAllocationData(allocations[i]);
1543
Adam Sawicki0667e332018-08-24 17:26:44 +02001544 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001545
1546 {
1547 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1548 for(size_t i = 0; i < allocations.size(); ++i)
1549 vmaAllocations[i] = allocations[i].m_Allocation;
1550
1551 const size_t nonMovablePercent = 0;
1552 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1553 for(size_t i = 0; i < nonMovableCount; ++i)
1554 {
1555 size_t index = (size_t)rand() % vmaAllocations.size();
1556 vmaAllocations.erase(vmaAllocations.begin() + index);
1557 }
1558
1559 const uint32_t defragCount = 1;
1560 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1561 {
1562 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1563
1564 VmaDefragmentationInfo defragmentationInfo;
1565 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1566 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1567
1568 wprintf(L"Defragmentation #%u\n", defragIndex);
1569
1570 time_point begTime = std::chrono::high_resolution_clock::now();
1571
1572 VmaDefragmentationStats stats;
1573 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001574 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001575
1576 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1577
1578 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1579 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1580 wprintf(L"Time: %.2f s\n", defragmentDuration);
1581
1582 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1583 {
1584 if(allocationsChanged[i])
1585 {
1586 RecreateAllocationResource(allocations[i]);
1587 }
1588 }
1589
1590 for(size_t i = 0; i < allocations.size(); ++i)
1591 ValidateAllocationData(allocations[i]);
1592
Adam Sawicki0667e332018-08-24 17:26:44 +02001593 //wchar_t fileName[MAX_PATH];
1594 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1595 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001596 }
1597 }
1598
1599 // Destroy all remaining allocations.
1600 DestroyAllAllocations(allocations);
1601}
1602
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001603static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001604{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001605 wprintf(L"Test defragmentation GPU\n");
Adam Sawicki05704002018-11-08 16:07:29 +01001606 g_MemoryAliasingWarningEnabled = false;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001607
1608 std::vector<AllocInfo> allocations;
1609
1610 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001611 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1612 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001613 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001614 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1615 const size_t percentToLeave = 30;
1616 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001617 RandomNumberGenerator rand = { 234522 };
1618
1619 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001620
1621 VmaAllocationCreateInfo allocCreateInfo = {};
1622 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001623 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001624
1625 // Create all intended buffers.
1626 for(size_t i = 0; i < bufCount; ++i)
1627 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001628 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1629
1630 if(rand.Generate() % 100 < percentNonMovable)
1631 {
1632 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1633 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1634 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1635 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1636 }
1637 else
1638 {
1639 // Different usage, just to see a different color in the output from VmaDumpVis.
1640 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1641 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1642 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1643 // And in JSON dump.
1644 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1645 }
1646
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001647 AllocInfo alloc;
1648 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1649 alloc.m_StartValue = rand.Generate();
1650 allocations.push_back(alloc);
1651 }
1652
1653 // Destroy some percentage of them.
1654 {
1655 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1656 for(size_t i = 0; i < buffersToDestroy; ++i)
1657 {
1658 const size_t index = rand.Generate() % allocations.size();
1659 allocations[index].Destroy();
1660 allocations.erase(allocations.begin() + index);
1661 }
1662 }
1663
1664 // Fill them with meaningful data.
1665 UploadGpuData(allocations.data(), allocations.size());
1666
Adam Sawickic6ede152018-11-16 17:04:14 +01001667 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001668 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001669 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001670
1671 // Defragment using GPU only.
1672 {
1673 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001674
Adam Sawickic6ede152018-11-16 17:04:14 +01001675 std::vector<VmaAllocation> allocationPtrs;
1676 std::vector<VkBool32> allocationChanged;
1677 std::vector<size_t> allocationOriginalIndex;
1678
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001679 for(size_t i = 0; i < allocCount; ++i)
1680 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001681 VmaAllocationInfo allocInfo = {};
1682 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1683 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1684 {
1685 allocationPtrs.push_back(allocations[i].m_Allocation);
1686 allocationChanged.push_back(VK_FALSE);
1687 allocationOriginalIndex.push_back(i);
1688 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001689 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001690
1691 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001692
1693 BeginSingleTimeCommands();
1694
1695 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001696 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001697 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001698 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001699 defragInfo.pAllocationsChanged = allocationChanged.data();
1700 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001701 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1702 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1703
1704 VmaDefragmentationStats stats = {};
1705 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1706 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1707 TEST(res >= VK_SUCCESS);
1708
1709 EndSingleTimeCommands();
1710
1711 vmaDefragmentationEnd(g_hAllocator, ctx);
1712
Adam Sawickic6ede152018-11-16 17:04:14 +01001713 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001714 {
1715 if(allocationChanged[i])
1716 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001717 const size_t origAllocIndex = allocationOriginalIndex[i];
1718 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001719 }
1720 }
1721
Adam Sawicki440307e2018-10-18 15:05:19 +02001722 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1723 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001724 }
1725
1726 ValidateGpuData(allocations.data(), allocations.size());
1727
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001728 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001729 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001730
1731 // Destroy all remaining buffers.
1732 for(size_t i = allocations.size(); i--; )
1733 {
1734 allocations[i].Destroy();
1735 }
Adam Sawicki05704002018-11-08 16:07:29 +01001736
1737 g_MemoryAliasingWarningEnabled = true;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001738}
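
// Illustrative sketch (not part of the original tests): the core GPU
// defragmentation call sequence exercised by TestDefragmentationGpu, reduced
// to its essence. It assumes this file's existing g_hAllocator,
// g_hTemporaryCommandBuffer and the BeginSingleTimeCommands /
// EndSingleTimeCommands helpers; recreation of moved resources is left out.
static void SketchGpuDefragmentation(std::vector<VmaAllocation>& movableAllocs)
{
    std::vector<VkBool32> allocationsChanged(movableAllocs.size());

    BeginSingleTimeCommands();

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)movableAllocs.size();
    defragInfo.pAllocations = movableAllocs.data();
    defragInfo.pAllocationsChanged = allocationsChanged.data();
    defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
    defragInfo.commandBuffer = g_hTemporaryCommandBuffer; // Copy commands are recorded here.

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
    TEST(res >= VK_SUCCESS);

    // Submits the command buffer and waits for it, so the copies are finished
    // before the defragmentation context is ended.
    EndSingleTimeCommands();

    vmaDefragmentationEnd(g_hAllocator, ctx);

    // Allocations flagged in allocationsChanged must have their buffers/images
    // recreated and rebound, as TestDefragmentationGpu does above.
}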
1739
Adam Sawickib8333fb2018-03-13 16:15:53 +01001740static void TestUserData()
1741{
1742 VkResult res;
1743
1744 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1745 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1746 bufCreateInfo.size = 0x10000;
1747
1748 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
1749 {
1750 // Opaque pointer
1751 {
1752
1753 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
1754 void* pointerToSomething = &res;
1755
1756 VmaAllocationCreateInfo allocCreateInfo = {};
1757 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1758 allocCreateInfo.pUserData = numberAsPointer;
1759 if(testIndex == 1)
1760 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1761
1762 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1763 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001764 TEST(res == VK_SUCCESS);
1765 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001766
1767 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001768 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001769
1770 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
1771 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001772 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001773
1774 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1775 }
1776
1777 // String
1778 {
1779 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
1780 const char* name2 = "2";
1781 const size_t name1Len = strlen(name1);
1782
1783 char* name1Buf = new char[name1Len + 1];
1784 strcpy_s(name1Buf, name1Len + 1, name1);
1785
1786 VmaAllocationCreateInfo allocCreateInfo = {};
1787 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1788 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
1789 allocCreateInfo.pUserData = name1Buf;
1790 if(testIndex == 1)
1791 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1792
1793 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1794 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001795 TEST(res == VK_SUCCESS);
1796 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
1797 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001798
1799 delete[] name1Buf;
1800
1801 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001802 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001803
1804 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
1805 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001806 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001807
1808 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
1809 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001810 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001811
1812 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1813 }
1814 }
1815}
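
// Illustrative sketch (not part of the original tests): the typical pattern for
// attaching a human-readable name to an allocation, as verified by TestUserData.
// With VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT the allocator stores its
// own copy of the string, so the caller's buffer can be freed or reused.
// Assumes this file's existing g_hAllocator and TEST macro.
static void SketchNamedAllocation()
{
    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    bufCreateInfo.size = 0x10000;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
    allocCreateInfo.pUserData = (void*)"Example index buffer";

    VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
    TEST(res == VK_SUCCESS);
    TEST(strcmp((const char*)allocInfo.pUserData, "Example index buffer") == 0);

    // The name can be replaced later; the new string is copied as well.
    vmaSetAllocationUserData(g_hAllocator, alloc, (void*)"Renamed example buffer");

    vmaDestroyBuffer(g_hAllocator, buf, alloc);
}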
1816
Adam Sawicki370ab182018-11-08 16:31:00 +01001817static void TestInvalidAllocations()
1818{
1819 VkResult res;
1820
1821 VmaAllocationCreateInfo allocCreateInfo = {};
1822 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1823
1824 // Try to allocate 0 bytes.
1825 {
1826 VkMemoryRequirements memReq = {};
1827 memReq.size = 0; // !!!
1828 memReq.alignment = 4;
1829 memReq.memoryTypeBits = UINT32_MAX;
1830 VmaAllocation alloc = VK_NULL_HANDLE;
1831 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
1832 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
1833 }
1834
1835 // Try to create buffer with size = 0.
1836 {
1837 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1838 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1839 bufCreateInfo.size = 0; // !!!
1840 VkBuffer buf = VK_NULL_HANDLE;
1841 VmaAllocation alloc = VK_NULL_HANDLE;
1842 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
1843 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1844 }
1845
1846 // Try to create image with one dimension = 0.
1847 {
1848 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1849 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
1850 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
1851 imageCreateInfo.extent.width = 128;
1852 imageCreateInfo.extent.height = 0; // !!!
1853 imageCreateInfo.extent.depth = 1;
1854 imageCreateInfo.mipLevels = 1;
1855 imageCreateInfo.arrayLayers = 1;
1856 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1857 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
1858 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1859 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1860 VkImage image = VK_NULL_HANDLE;
1861 VmaAllocation alloc = VK_NULL_HANDLE;
1862 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
1863 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
1864 }
1865}
1866
Adam Sawickib8333fb2018-03-13 16:15:53 +01001867static void TestMemoryRequirements()
1868{
1869 VkResult res;
1870 VkBuffer buf;
1871 VmaAllocation alloc;
1872 VmaAllocationInfo allocInfo;
1873
1874 const VkPhysicalDeviceMemoryProperties* memProps;
1875 vmaGetMemoryProperties(g_hAllocator, &memProps);
1876
1877 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1878 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1879 bufInfo.size = 128;
1880
1881 VmaAllocationCreateInfo allocCreateInfo = {};
1882
1883 // No requirements.
1884 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001885 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001886 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1887
1888 // Usage.
1889 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1890 allocCreateInfo.requiredFlags = 0;
1891 allocCreateInfo.preferredFlags = 0;
1892 allocCreateInfo.memoryTypeBits = UINT32_MAX;
1893
1894 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001895 TEST(res == VK_SUCCESS);
1896 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001897 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1898
1899 // Required flags, preferred flags.
1900 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
1901 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1902 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1903 allocCreateInfo.memoryTypeBits = 0;
1904
1905 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001906 TEST(res == VK_SUCCESS);
1907 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
1908 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001909 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1910
1911 // memoryTypeBits.
1912 const uint32_t memType = allocInfo.memoryType;
1913 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1914 allocCreateInfo.requiredFlags = 0;
1915 allocCreateInfo.preferredFlags = 0;
1916 allocCreateInfo.memoryTypeBits = 1u << memType;
1917
1918 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001919 TEST(res == VK_SUCCESS);
1920 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001921 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1922
1923}
1924
1925static void TestBasics()
1926{
1927 VkResult res;
1928
1929 TestMemoryRequirements();
1930
1931 // Lost allocation
1932 {
1933 VmaAllocation alloc = VK_NULL_HANDLE;
1934 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001935 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001936
1937 VmaAllocationInfo allocInfo;
1938 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001939 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
1940 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001941
1942 vmaFreeMemory(g_hAllocator, alloc);
1943 }
1944
1945 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
1946 {
1947 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1948 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
1949 bufCreateInfo.size = 128;
1950
1951 VmaAllocationCreateInfo allocCreateInfo = {};
1952 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
1953 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1954
1955 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
1956 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001957 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001958
1959 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1960
1961 // Same with OWN_MEMORY.
1962 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
1963
1964 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001965 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001966
1967 vmaDestroyBuffer(g_hAllocator, buf, alloc);
1968 }
1969
1970 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01001971
1972 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01001973}
1974
1975void TestHeapSizeLimit()
1976{
1977 const VkDeviceSize HEAP_SIZE_LIMIT = 1ull * 1024 * 1024 * 1024; // 1 GB
1978 const VkDeviceSize BLOCK_SIZE = 128ull * 1024 * 1024; // 128 MB
1979
1980 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
1981 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
1982 {
1983 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
1984 }
1985
1986 VmaAllocatorCreateInfo allocatorCreateInfo = {};
1987 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
1988 allocatorCreateInfo.device = g_hDevice;
1989 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
1990
1991 VmaAllocator hAllocator;
1992 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001993 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001994
1995 struct Item
1996 {
1997 VkBuffer hBuf;
1998 VmaAllocation hAlloc;
1999 };
2000 std::vector<Item> items;
2001
2002 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2003 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2004
2005 // 1. Allocate two buffers of dedicated (own) memory, each half the size of BLOCK_SIZE.
2006 VmaAllocationInfo ownAllocInfo;
2007 {
2008 VmaAllocationCreateInfo allocCreateInfo = {};
2009 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2010 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2011
2012 bufCreateInfo.size = BLOCK_SIZE / 2;
2013
2014 for(size_t i = 0; i < 2; ++i)
2015 {
2016 Item item;
2017 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &ownAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002018 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002019 items.push_back(item);
2020 }
2021 }
2022
2023 // Create a pool to make sure further allocations come from this same memory type (and thus the same limited heap).
2024 VmaPoolCreateInfo poolCreateInfo = {};
2025 poolCreateInfo.memoryTypeIndex = ownAllocInfo.memoryType;
2026 poolCreateInfo.blockSize = BLOCK_SIZE;
2027
2028 VmaPool hPool;
2029 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002030 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002031
2032 // 2. Allocate normal buffers from all the remaining memory.
2033 {
2034 VmaAllocationCreateInfo allocCreateInfo = {};
2035 allocCreateInfo.pool = hPool;
2036
2037 bufCreateInfo.size = BLOCK_SIZE / 2;
2038
2039 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2040 for(size_t i = 0; i < bufCount; ++i)
2041 {
2042 Item item;
2043 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002044 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002045 items.push_back(item);
2046 }
2047 }
2048
2049 // 3. Allocation of one more (even small) buffer should fail.
2050 {
2051 VmaAllocationCreateInfo allocCreateInfo = {};
2052 allocCreateInfo.pool = hPool;
2053
2054 bufCreateInfo.size = 128;
2055
2056 VkBuffer hBuf;
2057 VmaAllocation hAlloc;
2058 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002059 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002060 }
2061
2062 // Destroy everything.
2063 for(size_t i = items.size(); i--; )
2064 {
2065 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2066 }
2067
2068 vmaDestroyPool(hAllocator, hPool);
2069
2070 vmaDestroyAllocator(hAllocator);
2071}
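
// Illustrative sketch (not part of the original tests): the minimal setup for
// pHeapSizeLimit, the feature exercised by TestHeapSizeLimit above. Entries set
// to VK_WHOLE_SIZE leave the corresponding heap unlimited. The 256 MB cap on
// heap 0 is an arbitrary example value. Assumes this file's existing
// g_hPhysicalDevice, g_hDevice and TEST macro.
static void SketchHeapSizeLimit()
{
    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE;            // Unlimited by default.
    heapSizeLimit[0] = 256ull * 1024 * 1024;         // Cap heap 0 at 256 MB.

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
    allocatorCreateInfo.device = g_hDevice;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;

    VmaAllocator hAllocator = VK_NULL_HANDLE;
    VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
    TEST(res == VK_SUCCESS);

    // Allocations that would exceed the capped heap now fail with
    // VK_ERROR_OUT_OF_DEVICE_MEMORY, which is what the test above asserts.
    vmaDestroyAllocator(hAllocator);
}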
2072
Adam Sawicki212a4a62018-06-14 15:44:45 +02002073#if VMA_DEBUG_MARGIN
Adam Sawicki73b16652018-06-11 16:39:25 +02002074static void TestDebugMargin()
2075{
2076 if(VMA_DEBUG_MARGIN == 0)
2077 {
2078 return;
2079 }
2080
2081 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawicki212a4a62018-06-14 15:44:45 +02002082 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
Adam Sawicki73b16652018-06-11 16:39:25 +02002083
2084 VmaAllocationCreateInfo allocCreateInfo = {};
Adam Sawicki212a4a62018-06-14 15:44:45 +02002085 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
Adam Sawicki73b16652018-06-11 16:39:25 +02002086
2087 // Create a few buffers of different sizes.
2088 const size_t BUF_COUNT = 10;
2089 BufferInfo buffers[BUF_COUNT];
2090 VmaAllocationInfo allocInfo[BUF_COUNT];
2091 for(size_t i = 0; i < BUF_COUNT; ++i)
2092 {
2093 bufInfo.size = (VkDeviceSize)(i + 1) * 64;
Adam Sawicki212a4a62018-06-14 15:44:45 +02002094 // Last one will be mapped.
2095 allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
Adam Sawicki73b16652018-06-11 16:39:25 +02002096
2097 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002098 TEST(res == VK_SUCCESS);
Adam Sawicki73b16652018-06-11 16:39:25 +02002099 // The margin is also preserved at the beginning of a block.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002100 TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002101
2102 if(i == BUF_COUNT - 1)
2103 {
2104 // Fill with data.
Adam Sawickib8d34d52018-10-03 17:41:20 +02002105 TEST(allocInfo[i].pMappedData != nullptr);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002106 // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
2107 memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
2108 }
Adam Sawicki73b16652018-06-11 16:39:25 +02002109 }
2110
2111 // Check if their offsets preserve margin between them.
2112 std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
2113 {
2114 if(lhs.deviceMemory != rhs.deviceMemory)
2115 {
2116 return lhs.deviceMemory < rhs.deviceMemory;
2117 }
2118 return lhs.offset < rhs.offset;
2119 });
2120 for(size_t i = 1; i < BUF_COUNT; ++i)
2121 {
2122 if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
2123 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002124 TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
Adam Sawicki73b16652018-06-11 16:39:25 +02002125 }
2126 }
2127
Adam Sawicki212a4a62018-06-14 15:44:45 +02002128 VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002129 TEST(res == VK_SUCCESS);
Adam Sawicki212a4a62018-06-14 15:44:45 +02002130
Adam Sawicki73b16652018-06-11 16:39:25 +02002131 // Destroy all buffers.
2132 for(size_t i = BUF_COUNT; i--; )
2133 {
2134 vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
2135 }
2136}
Adam Sawicki212a4a62018-06-14 15:44:45 +02002137#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002138
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002139static void TestLinearAllocator()
2140{
2141 wprintf(L"Test linear allocator\n");
2142
2143 RandomNumberGenerator rand{645332};
2144
2145 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2146 sampleBufCreateInfo.size = 1024; // Whatever.
2147 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2148
2149 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2150 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2151
2152 VmaPoolCreateInfo poolCreateInfo = {};
2153 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002154 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002155
Adam Sawickiee082772018-06-20 17:45:49 +02002156 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002157 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2158 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2159
2160 VmaPool pool = nullptr;
2161 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002162 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002163
2164 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2165
2166 VmaAllocationCreateInfo allocCreateInfo = {};
2167 allocCreateInfo.pool = pool;
2168
2169 constexpr size_t maxBufCount = 100;
2170 std::vector<BufferInfo> bufInfo;
2171
2172 constexpr VkDeviceSize bufSizeMin = 16;
2173 constexpr VkDeviceSize bufSizeMax = 1024;
2174 VmaAllocationInfo allocInfo;
2175 VkDeviceSize prevOffset = 0;
2176
2177 // Test one-time free.
2178 for(size_t i = 0; i < 2; ++i)
2179 {
2180 // Allocate a number of buffers of varying sizes that surely fit into this block.
2181 VkDeviceSize bufSumSize = 0;
2182 for(size_t i = 0; i < maxBufCount; ++i)
2183 {
2184 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2185 BufferInfo newBufInfo;
2186 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2187 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002188 TEST(res == VK_SUCCESS);
2189 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002190 bufInfo.push_back(newBufInfo);
2191 prevOffset = allocInfo.offset;
2192 bufSumSize += bufCreateInfo.size;
2193 }
2194
2195 // Validate pool stats.
2196 VmaPoolStats stats;
2197 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002198 TEST(stats.size == poolCreateInfo.blockSize);
2199 TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
2200 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002201
2202 // Destroy the buffers in random order.
2203 while(!bufInfo.empty())
2204 {
2205 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2206 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2207 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2208 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2209 }
2210 }
2211
2212 // Test stack.
2213 {
2214 // Allocate a number of buffers of varying sizes that surely fit into this block.
2215 for(size_t i = 0; i < maxBufCount; ++i)
2216 {
2217 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2218 BufferInfo newBufInfo;
2219 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2220 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002221 TEST(res == VK_SUCCESS);
2222 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002223 bufInfo.push_back(newBufInfo);
2224 prevOffset = allocInfo.offset;
2225 }
2226
2227 // Destroy a few buffers from the top of the stack.
2228 for(size_t i = 0; i < maxBufCount / 5; ++i)
2229 {
2230 const BufferInfo& currBufInfo = bufInfo.back();
2231 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2232 bufInfo.pop_back();
2233 }
2234
2235 // Create some more
2236 for(size_t i = 0; i < maxBufCount / 5; ++i)
2237 {
2238 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2239 BufferInfo newBufInfo;
2240 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2241 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002242 TEST(res == VK_SUCCESS);
2243 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002244 bufInfo.push_back(newBufInfo);
2245 prevOffset = allocInfo.offset;
2246 }
2247
2248 // Destroy the buffers in reverse order.
2249 while(!bufInfo.empty())
2250 {
2251 const BufferInfo& currBufInfo = bufInfo.back();
2252 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2253 bufInfo.pop_back();
2254 }
2255 }
2256
Adam Sawickiee082772018-06-20 17:45:49 +02002257 // Test ring buffer.
2258 {
2259 // Allocate a number of buffers that surely fit into this block.
2260 bufCreateInfo.size = bufSizeMax;
2261 for(size_t i = 0; i < maxBufCount; ++i)
2262 {
2263 BufferInfo newBufInfo;
2264 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2265 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002266 TEST(res == VK_SUCCESS);
2267 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02002268 bufInfo.push_back(newBufInfo);
2269 prevOffset = allocInfo.offset;
2270 }
2271
2272 // Free and allocate new buffers enough times to make sure we wrap around at least once.
2273 const size_t buffersPerIter = maxBufCount / 10 - 1;
2274 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
2275 for(size_t iter = 0; iter < iterCount; ++iter)
2276 {
2277 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2278 {
2279 const BufferInfo& currBufInfo = bufInfo.front();
2280 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2281 bufInfo.erase(bufInfo.begin());
2282 }
2283 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
2284 {
2285 BufferInfo newBufInfo;
2286 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2287 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002288 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02002289 bufInfo.push_back(newBufInfo);
2290 }
2291 }
2292
2293 // Allocate buffers until we reach out-of-memory.
2294 uint32_t debugIndex = 0;
2295 while(res == VK_SUCCESS)
2296 {
2297 BufferInfo newBufInfo;
2298 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2299 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2300 if(res == VK_SUCCESS)
2301 {
2302 bufInfo.push_back(newBufInfo);
2303 }
2304 else
2305 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002306 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02002307 }
2308 ++debugIndex;
2309 }
2310
2311 // Destroy the buffers in random order.
2312 while(!bufInfo.empty())
2313 {
2314 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2315 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2316 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2317 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2318 }
2319 }
2320
Adam Sawicki680b2252018-08-22 14:47:32 +02002321 // Test double stack.
2322 {
2323 // Allocate a number of buffers of varying sizes that surely fit into this block, alternating between bottom and top.
2324 VkDeviceSize prevOffsetLower = 0;
2325 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
2326 for(size_t i = 0; i < maxBufCount; ++i)
2327 {
2328 const bool upperAddress = (i % 2) != 0;
2329 if(upperAddress)
2330 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2331 else
2332 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2333 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2334 BufferInfo newBufInfo;
2335 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2336 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002337 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002338 if(upperAddress)
2339 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002340 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002341 prevOffsetUpper = allocInfo.offset;
2342 }
2343 else
2344 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002345 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002346 prevOffsetLower = allocInfo.offset;
2347 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002348 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002349 bufInfo.push_back(newBufInfo);
2350 }
2351
2352 // Destroy a few buffers from the top of the stack.
2353 for(size_t i = 0; i < maxBufCount / 5; ++i)
2354 {
2355 const BufferInfo& currBufInfo = bufInfo.back();
2356 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2357 bufInfo.pop_back();
2358 }
2359
2360 // Create some more
2361 for(size_t i = 0; i < maxBufCount / 5; ++i)
2362 {
2363 const bool upperAddress = (i % 2) != 0;
2364 if(upperAddress)
2365 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2366 else
2367 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2368 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2369 BufferInfo newBufInfo;
2370 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2371 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002372 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02002373 bufInfo.push_back(newBufInfo);
2374 }
2375
2376 // Destroy the buffers in reverse order.
2377 while(!bufInfo.empty())
2378 {
2379 const BufferInfo& currBufInfo = bufInfo.back();
2380 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2381 bufInfo.pop_back();
2382 }
2383
2384 // Create buffers on both sides until we reach out of memory.
2385 prevOffsetLower = 0;
2386 prevOffsetUpper = poolCreateInfo.blockSize;
2387 res = VK_SUCCESS;
2388 for(size_t i = 0; res == VK_SUCCESS; ++i)
2389 {
2390 const bool upperAddress = (i % 2) != 0;
2391 if(upperAddress)
2392 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2393 else
2394 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2395 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2396 BufferInfo newBufInfo;
2397 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2398 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2399 if(res == VK_SUCCESS)
2400 {
2401 if(upperAddress)
2402 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002403 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002404 prevOffsetUpper = allocInfo.offset;
2405 }
2406 else
2407 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002408 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02002409 prevOffsetLower = allocInfo.offset;
2410 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002411 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002412 bufInfo.push_back(newBufInfo);
2413 }
2414 }
2415
2416 // Destroy the buffers in random order.
2417 while(!bufInfo.empty())
2418 {
2419 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2420 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2421 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2422 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2423 }
2424
2425 // Create buffers on upper side only, constant size, until we reach out of memory.
2426 prevOffsetUpper = poolCreateInfo.blockSize;
2427 res = VK_SUCCESS;
2428 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2429 bufCreateInfo.size = bufSizeMax;
2430 for(size_t i = 0; res == VK_SUCCESS; ++i)
2431 {
2432 BufferInfo newBufInfo;
2433 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2434 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2435 if(res == VK_SUCCESS)
2436 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02002437 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02002438 prevOffsetUpper = allocInfo.offset;
2439 bufInfo.push_back(newBufInfo);
2440 }
2441 }
2442
2443 // Destroy the buffers in reverse order.
2444 while(!bufInfo.empty())
2445 {
2446 const BufferInfo& currBufInfo = bufInfo.back();
2447 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2448 bufInfo.pop_back();
2449 }
2450 }
2451
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002452 // Test ring buffer with lost allocations.
2453 {
2454 // Allocate a number of buffers until the pool is full.
2455 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
2456 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
2457 res = VK_SUCCESS;
2458 for(size_t i = 0; res == VK_SUCCESS; ++i)
2459 {
2460 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2461
2462 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2463
2464 BufferInfo newBufInfo;
2465 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2466 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2467 if(res == VK_SUCCESS)
2468 bufInfo.push_back(newBufInfo);
2469 }
2470
2471 // Free first half of it.
2472 {
2473 const size_t buffersToDelete = bufInfo.size() / 2;
2474 for(size_t i = 0; i < buffersToDelete; ++i)
2475 {
2476 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2477 }
2478 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
2479 }
2480
2481 // Allocate a number of buffers until the pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002482 // This way we make sure the ring buffer wraps around, with its front in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002483 res = VK_SUCCESS;
2484 for(size_t i = 0; res == VK_SUCCESS; ++i)
2485 {
2486 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2487
2488 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2489
2490 BufferInfo newBufInfo;
2491 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2492 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
2493 if(res == VK_SUCCESS)
2494 bufInfo.push_back(newBufInfo);
2495 }
2496
2497 VkDeviceSize firstNewOffset;
2498 {
2499 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2500
2501 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
2502 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
2503 bufCreateInfo.size = bufSizeMax;
2504
2505 BufferInfo newBufInfo;
2506 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2507 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002508 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002509 bufInfo.push_back(newBufInfo);
2510 firstNewOffset = allocInfo.offset;
2511
2512 // Make sure at least one buffer from the beginning became lost.
2513 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002514 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002515 }
2516
2517 // Allocate more buffers with CAN_MAKE_OTHER_LOST until we wrap around with them.
2518 size_t newCount = 1;
2519 for(;;)
2520 {
2521 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2522
2523 bufCreateInfo.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2524
2525 BufferInfo newBufInfo;
2526 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2527 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002528 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002529 bufInfo.push_back(newBufInfo);
2530 ++newCount;
2531 if(allocInfo.offset < firstNewOffset)
2532 break;
2533 }
2534
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002535 // Delete buffers that are lost.
2536 for(size_t i = bufInfo.size(); i--; )
2537 {
2538 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2539 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2540 {
2541 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2542 bufInfo.erase(bufInfo.begin() + i);
2543 }
2544 }
2545
2546 // Test vmaMakePoolAllocationsLost
2547 {
2548 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
2549
2550 size_t lostAllocCount = SIZE_MAX;
2551 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002552 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002553
2554 size_t realLostAllocCount = 0;
2555 for(size_t i = 0; i < bufInfo.size(); ++i)
2556 {
2557 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
2558 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
2559 ++realLostAllocCount;
2560 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02002561 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02002562 }
2563
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02002564 // Destroy all the buffers in forward order.
2565 for(size_t i = 0; i < bufInfo.size(); ++i)
2566 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
2567 bufInfo.clear();
2568 }
2569
Adam Sawicki70a683e2018-08-24 15:36:32 +02002570 vmaDestroyPool(g_hAllocator, pool);
2571}
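
// Illustrative sketch (not part of the original tests): creating the kind of
// single-block linear pool that TestLinearAllocator drives through its
// free-at-once, stack, double-stack and ring-buffer scenarios. The block size
// and sample buffer parameters are arbitrary example values. Assumes this
// file's existing g_hAllocator and TEST macro.
static void SketchLinearPoolCreation()
{
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator,
        &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // A single fixed-size block managed strictly linearly: allocations are
    // appended, so it can behave as a stack, double stack or ring buffer.
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.blockSize = 1024 * 300;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Sub-allocate by setting VmaAllocationCreateInfo::pool, exactly as the
    // test above does, then destroy the pool when finished.
    vmaDestroyPool(g_hAllocator, pool);
}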
Adam Sawickif799c4f2018-08-23 10:40:30 +02002572
Adam Sawicki70a683e2018-08-24 15:36:32 +02002573static void TestLinearAllocatorMultiBlock()
2574{
2575 wprintf(L"Test linear allocator multi block\n");
2576
2577 RandomNumberGenerator rand{345673};
2578
2579 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2580 sampleBufCreateInfo.size = 1024 * 1024;
2581 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2582
2583 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2584 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2585
2586 VmaPoolCreateInfo poolCreateInfo = {};
2587 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2588 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002589 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002590
2591 VmaPool pool = nullptr;
2592 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002593 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002594
2595 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2596
2597 VmaAllocationCreateInfo allocCreateInfo = {};
2598 allocCreateInfo.pool = pool;
2599
2600 std::vector<BufferInfo> bufInfo;
2601 VmaAllocationInfo allocInfo;
2602
2603 // Test one-time free.
2604 {
2605 // Allocate buffers until we move to a second block.
2606 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2607 for(uint32_t i = 0; ; ++i)
2608 {
2609 BufferInfo newBufInfo;
2610 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2611 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002612 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002613 bufInfo.push_back(newBufInfo);
2614 if(lastMem && allocInfo.deviceMemory != lastMem)
2615 {
2616 break;
2617 }
2618 lastMem = allocInfo.deviceMemory;
2619 }
2620
Adam Sawickib8d34d52018-10-03 17:41:20 +02002621 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002622
2623 // Make sure that the pool now has two blocks.
2624 VmaPoolStats poolStats = {};
2625 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002626 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002627
2628 // Destroy all the buffers in random order.
2629 while(!bufInfo.empty())
2630 {
2631 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
2632 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
2633 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2634 bufInfo.erase(bufInfo.begin() + indexToDestroy);
2635 }
2636
2637 // Make sure that the pool now has at most one block.
2638 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002639 TEST(poolStats.blockCount <= 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002640 }
2641
2642 // Test stack.
2643 {
2644 // Allocate buffers until we move to a second block.
2645 VkDeviceMemory lastMem = VK_NULL_HANDLE;
2646 for(uint32_t i = 0; ; ++i)
2647 {
2648 BufferInfo newBufInfo;
2649 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2650 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002651 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002652 bufInfo.push_back(newBufInfo);
2653 if(lastMem && allocInfo.deviceMemory != lastMem)
2654 {
2655 break;
2656 }
2657 lastMem = allocInfo.deviceMemory;
2658 }
2659
Adam Sawickib8d34d52018-10-03 17:41:20 +02002660 TEST(bufInfo.size() > 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002661
2662 // Add a few more buffers.
2663 for(uint32_t i = 0; i < 5; ++i)
2664 {
2665 BufferInfo newBufInfo;
2666 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2667 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002668 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002669 bufInfo.push_back(newBufInfo);
2670 }
2671
2672 // Make sure that the pool now has two blocks.
2673 VmaPoolStats poolStats = {};
2674 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002675 TEST(poolStats.blockCount == 2);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002676
2677 // Delete half of buffers, LIFO.
2678 for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
2679 {
2680 const BufferInfo& currBufInfo = bufInfo.back();
2681 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2682 bufInfo.pop_back();
2683 }
2684
2685 // Add one more buffer.
2686 BufferInfo newBufInfo;
2687 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2688 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002689 TEST(res == VK_SUCCESS);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002690 bufInfo.push_back(newBufInfo);
2691
2692 // Make sure that the pool now has one block.
2693 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002694 TEST(poolStats.blockCount == 1);
Adam Sawicki70a683e2018-08-24 15:36:32 +02002695
2696 // Delete all the remaining buffers, LIFO.
2697 while(!bufInfo.empty())
2698 {
2699 const BufferInfo& currBufInfo = bufInfo.back();
2700 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2701 bufInfo.pop_back();
2702 }
Adam Sawickif799c4f2018-08-23 10:40:30 +02002703 }
2704
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002705 vmaDestroyPool(g_hAllocator, pool);
2706}
2707
Adam Sawickifd11d752018-08-22 15:02:10 +02002708static void ManuallyTestLinearAllocator()
2709{
2710 VmaStats origStats;
2711 vmaCalculateStats(g_hAllocator, &origStats);
2712
2713 wprintf(L"Manually test linear allocator\n");
2714
2715 RandomNumberGenerator rand{645332};
2716
2717 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2718 sampleBufCreateInfo.size = 1024; // Whatever.
2719 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2720
2721 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2722 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2723
2724 VmaPoolCreateInfo poolCreateInfo = {};
2725 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002726 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002727
2728 poolCreateInfo.blockSize = 10 * 1024;
2729 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2730 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2731
2732 VmaPool pool = nullptr;
2733 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002734 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002735
2736 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2737
2738 VmaAllocationCreateInfo allocCreateInfo = {};
2739 allocCreateInfo.pool = pool;
2740
2741 std::vector<BufferInfo> bufInfo;
2742 VmaAllocationInfo allocInfo;
2743 BufferInfo newBufInfo;
2744
2745 // Test double stack.
2746 {
2747 /*
2748 Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
2749 Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
2750
2751 In total:
2752 1 block allocated
2753 10240 Vulkan bytes
2754 6 new allocations
2755 2256 bytes in allocations
2756 */
2757
2758 bufCreateInfo.size = 32;
2759 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2760 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002761 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002762 bufInfo.push_back(newBufInfo);
2763
2764 bufCreateInfo.size = 1024;
2765 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2766 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002767 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002768 bufInfo.push_back(newBufInfo);
2769
2770 bufCreateInfo.size = 32;
2771 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2772 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002773 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002774 bufInfo.push_back(newBufInfo);
2775
2776 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
2777
2778 bufCreateInfo.size = 128;
2779 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2780 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002781 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002782 bufInfo.push_back(newBufInfo);
2783
2784 bufCreateInfo.size = 1024;
2785 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2786 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002787 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002788 bufInfo.push_back(newBufInfo);
2789
2790 bufCreateInfo.size = 16;
2791 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
2792 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002793 TEST(res == VK_SUCCESS);
Adam Sawickifd11d752018-08-22 15:02:10 +02002794 bufInfo.push_back(newBufInfo);
2795
2796 VmaStats currStats;
2797 vmaCalculateStats(g_hAllocator, &currStats);
2798 VmaPoolStats poolStats;
2799 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
2800
2801 char* statsStr = nullptr;
2802 vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
2803
2804 // PUT BREAKPOINT HERE TO CHECK.
2805 // Inspect: currStats versus origStats, poolStats, statsStr.
2806 int I = 0;
2807
2808 vmaFreeStatsString(g_hAllocator, statsStr);
2809
2810 // Destroy the buffers in reverse order.
2811 while(!bufInfo.empty())
2812 {
2813 const BufferInfo& currBufInfo = bufInfo.back();
2814 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
2815 bufInfo.pop_back();
2816 }
2817 }
2818
2819 vmaDestroyPool(g_hAllocator, pool);
2820}
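
// Illustrative sketch (not part of the original tests): dumping allocator
// statistics, which is what the breakpoint in ManuallyTestLinearAllocator is
// meant for inspecting. The detailed JSON string can also be visualized with
// the VmaDumpVis tool. Assumes this file's existing g_hAllocator.
static void SketchStatsDump()
{
    VmaStats stats;
    vmaCalculateStats(g_hAllocator, &stats); // Numeric totals per memory heap/type.

    char* statsStr = nullptr;
    vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE); // VK_TRUE = detailed map of allocations.
    // statsStr now holds a JSON document describing blocks and allocations.
    vmaFreeStatsString(g_hAllocator, statsStr);
}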
2821
Adam Sawicki80927152018-09-07 17:27:23 +02002822static void BenchmarkAlgorithmsCase(FILE* file,
2823 uint32_t algorithm,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002824 bool empty,
2825 VmaAllocationCreateFlags allocStrategy,
2826 FREE_ORDER freeOrder)
Adam Sawicki0a607132018-08-24 11:18:41 +02002827{
2828 RandomNumberGenerator rand{16223};
2829
2830 const VkDeviceSize bufSizeMin = 32;
2831 const VkDeviceSize bufSizeMax = 1024;
2832 const size_t maxBufCapacity = 10000;
2833 const uint32_t iterationCount = 10;
2834
2835 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2836 sampleBufCreateInfo.size = bufSizeMax;
2837 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2838
2839 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2840 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2841
2842 VmaPoolCreateInfo poolCreateInfo = {};
2843 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002844 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002845
2846 poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
Adam Sawicki80927152018-09-07 17:27:23 +02002847 poolCreateInfo.flags |= algorithm;
Adam Sawicki0a607132018-08-24 11:18:41 +02002848 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2849
2850 VmaPool pool = nullptr;
2851 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002852 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002853
2854 // Buffer created just to get memory requirements. Never bound to any memory.
2855 VkBuffer dummyBuffer = VK_NULL_HANDLE;
2856 res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002857 TEST(res == VK_SUCCESS && dummyBuffer);
Adam Sawicki0a607132018-08-24 11:18:41 +02002858
2859 VkMemoryRequirements memReq = {};
2860 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
2861
2862 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
2863
2864 VmaAllocationCreateInfo allocCreateInfo = {};
2865 allocCreateInfo.pool = pool;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002866 allocCreateInfo.flags = allocStrategy;
Adam Sawicki0a607132018-08-24 11:18:41 +02002867
2868 VmaAllocation alloc;
2869 std::vector<VmaAllocation> baseAllocations;
2870
2871 if(!empty)
2872 {
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002873 // Make allocations up to 1/3 of pool size.
Adam Sawicki0a607132018-08-24 11:18:41 +02002874 VkDeviceSize totalSize = 0;
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002875 while(totalSize < poolCreateInfo.blockSize / 3)
Adam Sawicki0a607132018-08-24 11:18:41 +02002876 {
2877 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2878 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002879 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002880 baseAllocations.push_back(alloc);
2881 totalSize += memReq.size;
2882 }
2883
2884 // Delete half of them, choose randomly.
2885 size_t allocsToDelete = baseAllocations.size() / 2;
2886 for(size_t i = 0; i < allocsToDelete; ++i)
2887 {
2888 const size_t index = (size_t)rand.Generate() % baseAllocations.size();
2889 vmaFreeMemory(g_hAllocator, baseAllocations[index]);
2890 baseAllocations.erase(baseAllocations.begin() + index);
2891 }
2892 }
2893
2894 // BENCHMARK
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02002895 const size_t allocCount = maxBufCapacity / 3;
Adam Sawicki0a607132018-08-24 11:18:41 +02002896 std::vector<VmaAllocation> testAllocations;
2897 testAllocations.reserve(allocCount);
2898 duration allocTotalDuration = duration::zero();
2899 duration freeTotalDuration = duration::zero();
2900 for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
2901 {
2902 // Allocations
2903 time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
2904 for(size_t i = 0; i < allocCount; ++i)
2905 {
2906 memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
2907 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002908 TEST(res == VK_SUCCESS);
Adam Sawicki0a607132018-08-24 11:18:41 +02002909 testAllocations.push_back(alloc);
2910 }
2911 allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
2912
2913 // Deallocations
2914 switch(freeOrder)
2915 {
2916 case FREE_ORDER::FORWARD:
2917 // Leave testAllocations unchanged.
2918 break;
2919 case FREE_ORDER::BACKWARD:
2920 std::reverse(testAllocations.begin(), testAllocations.end());
2921 break;
2922 case FREE_ORDER::RANDOM:
2923 std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
2924 break;
2925 default: assert(0);
2926 }
2927
2928 time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
2929 for(size_t i = 0; i < allocCount; ++i)
2930 vmaFreeMemory(g_hAllocator, testAllocations[i]);
2931 freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
2932
2933 testAllocations.clear();
2934 }
2935
2936 // Delete baseAllocations
2937 while(!baseAllocations.empty())
2938 {
2939 vmaFreeMemory(g_hAllocator, baseAllocations.back());
2940 baseAllocations.pop_back();
2941 }
2942
2943 vmaDestroyPool(g_hAllocator, pool);
2944
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002945 const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
2946 const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
2947
Adam Sawicki80927152018-09-07 17:27:23 +02002948 printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
2949 AlgorithmToStr(algorithm),
Adam Sawicki0667e332018-08-24 17:26:44 +02002950 empty ? "Empty" : "Not empty",
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002951 GetAllocationStrategyName(allocStrategy),
Adam Sawicki0a607132018-08-24 11:18:41 +02002952 FREE_ORDER_NAMES[(size_t)freeOrder],
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002953 allocTotalSeconds,
2954 freeTotalSeconds);
2955
2956 if(file)
2957 {
2958 std::string currTime;
2959 CurrentTimeToStr(currTime);
2960
Adam Sawicki80927152018-09-07 17:27:23 +02002961 fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002962 CODE_DESCRIPTION, currTime.c_str(),
Adam Sawicki80927152018-09-07 17:27:23 +02002963 AlgorithmToStr(algorithm),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002964 empty ? 1 : 0,
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002965 GetAllocationStrategyName(allocStrategy),
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002966 FREE_ORDER_NAMES[(uint32_t)freeOrder],
2967 allocTotalSeconds,
2968 freeTotalSeconds);
2969 }
Adam Sawicki0a607132018-08-24 11:18:41 +02002970}
2971
Adam Sawicki80927152018-09-07 17:27:23 +02002972static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02002973{
Adam Sawicki80927152018-09-07 17:27:23 +02002974 wprintf(L"Benchmark algorithms\n");
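    // Runs the Cartesian product of: free order x empty/non-empty pool x algorithm
    // (default, buddy, linear) x allocation strategy. How many free orders and
    // empty-pool variants are exercised depends on the global ConfigType.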
Adam Sawicki0a607132018-08-24 11:18:41 +02002975
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002976 if(file)
2977 {
2978 fprintf(file,
2979 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02002980 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02002981 "Allocation time (s),Deallocation time (s)\n");
2982 }
2983
Adam Sawicki0a607132018-08-24 11:18:41 +02002984 uint32_t freeOrderCount = 1;
2985 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
2986 freeOrderCount = 3;
2987 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
2988 freeOrderCount = 2;
2989
2990 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02002991 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02002992
2993 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
2994 {
2995 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
2996 switch(freeOrderIndex)
2997 {
2998 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
2999 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3000 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3001 default: assert(0);
3002 }
3003
3004 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3005 {
Adam Sawicki80927152018-09-07 17:27:23 +02003006 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003007 {
Adam Sawicki80927152018-09-07 17:27:23 +02003008 uint32_t algorithm = 0;
3009 switch(algorithmIndex)
3010 {
3011 case 0:
3012 break;
3013 case 1:
3014 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3015 break;
3016 case 2:
3017 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3018 break;
3019 default:
3020 assert(0);
3021 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003022
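                // The strategy flags only differentiate the default algorithm here,
                // so buddy and linear are benchmarked with a single (default) strategy.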
Adam Sawicki80927152018-09-07 17:27:23 +02003023 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003024 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3025 {
3026                 VmaAllocationCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003027 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003028 {
3029 switch(allocStrategyIndex)
3030 {
3031 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3032 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3033 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3034 default: assert(0);
3035 }
3036 }
3037
Adam Sawicki80927152018-09-07 17:27:23 +02003038 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003039 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003040 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003041 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003042 strategy,
3043 freeOrder); // freeOrder
3044 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003045 }
3046 }
3047 }
3048}
3049
Adam Sawickib8333fb2018-03-13 16:15:53 +01003050static void TestPool_SameSize()
3051{
3052 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3053 const size_t BUF_COUNT = 100;
3054 VkResult res;
3055
3056 RandomNumberGenerator rand{123};
3057
3058 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3059 bufferInfo.size = BUF_SIZE;
3060 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3061
3062 uint32_t memoryTypeBits = UINT32_MAX;
3063 {
3064 VkBuffer dummyBuffer;
3065 res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003066 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003067
3068 VkMemoryRequirements memReq;
3069 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3070 memoryTypeBits = memReq.memoryTypeBits;
3071
3072 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3073 }
3074
3075 VmaAllocationCreateInfo poolAllocInfo = {};
3076 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3077 uint32_t memTypeIndex;
3078 res = vmaFindMemoryTypeIndex(
3079 g_hAllocator,
3080 memoryTypeBits,
3081 &poolAllocInfo,
3082 &memTypeIndex);
3083
3084 VmaPoolCreateInfo poolCreateInfo = {};
3085 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3086 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3087 poolCreateInfo.minBlockCount = 1;
3088 poolCreateInfo.maxBlockCount = 4;
3089 poolCreateInfo.frameInUseCount = 0;
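    // blockSize * maxBlockCount == BUF_SIZE * BUF_COUNT, so the pool can hold exactly
    // BUF_COUNT of these buffers and the extra allocation attempted below must fail.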
3090
3091 VmaPool pool;
3092 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003093 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003094
3095 vmaSetCurrentFrameIndex(g_hAllocator, 1);
3096
3097 VmaAllocationCreateInfo allocInfo = {};
3098 allocInfo.pool = pool;
3099 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3100 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
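    // With these flags an allocation that is not touched in the current frame can be
    // made lost to satisfy new allocations; a lost allocation reports
    // deviceMemory == VK_NULL_HANDLE, which is what the checks below rely on.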
3101
3102 struct BufItem
3103 {
3104 VkBuffer Buf;
3105 VmaAllocation Alloc;
3106 };
3107 std::vector<BufItem> items;
3108
3109 // Fill entire pool.
3110 for(size_t i = 0; i < BUF_COUNT; ++i)
3111 {
3112 BufItem item;
3113 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003114 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003115 items.push_back(item);
3116 }
3117
3118 // Make sure that another allocation would fail.
3119 {
3120 BufItem item;
3121 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003122 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003123 }
3124
3125 // Validate that no buffer is lost. Also check that they are not mapped.
3126 for(size_t i = 0; i < items.size(); ++i)
3127 {
3128 VmaAllocationInfo allocInfo;
3129 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003130 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
3131 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003132 }
3133
3134 // Free some percent of random items.
3135 {
3136 const size_t PERCENT_TO_FREE = 10;
3137 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
3138 for(size_t i = 0; i < itemsToFree; ++i)
3139 {
3140 size_t index = (size_t)rand.Generate() % items.size();
3141 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3142 items.erase(items.begin() + index);
3143 }
3144 }
3145
3146 // Randomly allocate and free items.
3147 {
3148 const size_t OPERATION_COUNT = BUF_COUNT;
3149 for(size_t i = 0; i < OPERATION_COUNT; ++i)
3150 {
3151 bool allocate = rand.Generate() % 2 != 0;
3152 if(allocate)
3153 {
3154 if(items.size() < BUF_COUNT)
3155 {
3156 BufItem item;
3157 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003158 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003159 items.push_back(item);
3160 }
3161 }
3162 else // Free
3163 {
3164 if(!items.empty())
3165 {
3166 size_t index = (size_t)rand.Generate() % items.size();
3167 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
3168 items.erase(items.begin() + index);
3169 }
3170 }
3171 }
3172 }
3173
3174 // Allocate up to maximum.
3175 while(items.size() < BUF_COUNT)
3176 {
3177 BufItem item;
3178 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003179 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003180 items.push_back(item);
3181 }
3182
3183 // Validate that no buffer is lost.
3184 for(size_t i = 0; i < items.size(); ++i)
3185 {
3186 VmaAllocationInfo allocInfo;
3187 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003188 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003189 }
3190
3191 // Next frame.
3192 vmaSetCurrentFrameIndex(g_hAllocator, 2);
3193
3194 // Allocate another BUF_COUNT buffers.
3195 for(size_t i = 0; i < BUF_COUNT; ++i)
3196 {
3197 BufItem item;
3198 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003199 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003200 items.push_back(item);
3201 }
3202
3203 // Make sure the first BUF_COUNT buffers are lost. Delete them.
3204 for(size_t i = 0; i < BUF_COUNT; ++i)
3205 {
3206 VmaAllocationInfo allocInfo;
3207 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003208 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003209 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3210 }
3211 items.erase(items.begin(), items.begin() + BUF_COUNT);
3212
3213 // Validate that no buffer is lost.
3214 for(size_t i = 0; i < items.size(); ++i)
3215 {
3216 VmaAllocationInfo allocInfo;
3217 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003218 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003219 }
3220
3221 // Free one item.
3222 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
3223 items.pop_back();
3224
3225 // Validate statistics.
3226 {
3227 VmaPoolStats poolStats = {};
3228 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003229 TEST(poolStats.allocationCount == items.size());
3230 TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
3231 TEST(poolStats.unusedRangeCount == 1);
3232 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
3233 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003234 }
3235
3236 // Free all remaining items.
3237 for(size_t i = items.size(); i--; )
3238 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3239 items.clear();
3240
3241 // Allocate the maximum number of items again.
3242 for(size_t i = 0; i < BUF_COUNT; ++i)
3243 {
3244 BufItem item;
3245 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003246 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003247 items.push_back(item);
3248 }
3249
3250 // Delete every other item.
3251 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
3252 {
3253 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3254 items.erase(items.begin() + i);
3255 }
3256
3257 // Defragment!
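    // After deleting every other buffer, the remaining ~BUF_COUNT/2 allocations fit into
    // 2 of the 4 blocks, so defragmentation is expected to free 2 device memory blocks.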
3258 {
3259 std::vector<VmaAllocation> allocationsToDefragment(items.size());
3260 for(size_t i = 0; i < items.size(); ++i)
3261 allocationsToDefragment[i] = items[i].Alloc;
3262
3263 VmaDefragmentationStats defragmentationStats;
3264 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003265 TEST(res == VK_SUCCESS);
3266 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003267 }
3268
3269 // Free all remaining items.
3270 for(size_t i = items.size(); i--; )
3271 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3272 items.clear();
3273
3274 ////////////////////////////////////////////////////////////////////////////////
3275 // Test for vmaMakePoolAllocationsLost
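    // vmaMakePoolAllocationsLost marks as lost every allocation in the pool that has not
    // been touched (e.g. via vmaGetAllocationInfo) recently enough, as determined by the
    // current frame index and the pool's frameInUseCount.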
3276
3277 // Allocate 4 buffers on frame 10.
3278 vmaSetCurrentFrameIndex(g_hAllocator, 10);
3279 for(size_t i = 0; i < 4; ++i)
3280 {
3281 BufItem item;
3282 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003283 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003284 items.push_back(item);
3285 }
3286
3287 // Touch first 2 of them on frame 11.
3288 vmaSetCurrentFrameIndex(g_hAllocator, 11);
3289 for(size_t i = 0; i < 2; ++i)
3290 {
3291 VmaAllocationInfo allocInfo;
3292 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
3293 }
3294
3295 // vmaMakePoolAllocationsLost. Only the remaining 2 should become lost.
3296 size_t lostCount = 0xDEADC0DE;
3297 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003298 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003299
3300 // Make another call. Now 0 should be lost.
3301 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003302 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003303
3304 // Make another call, with null count. Should not crash.
3305 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
3306
3307 // END: Free all remaining items.
3308 for(size_t i = items.size(); i--; )
3309 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
3310
3311 items.clear();
3312
Adam Sawickid2924172018-06-11 12:48:46 +02003313 ////////////////////////////////////////////////////////////////////////////////
3314 // Test for allocation too large for pool
3315
3316 {
3317 VmaAllocationCreateInfo allocCreateInfo = {};
3318 allocCreateInfo.pool = pool;
3319
3320 VkMemoryRequirements memReq;
3321 memReq.memoryTypeBits = UINT32_MAX;
3322 memReq.alignment = 1;
3323 memReq.size = poolCreateInfo.blockSize + 4;
3324
3325 VmaAllocation alloc = nullptr;
3326 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003327 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02003328 }
3329
Adam Sawickib8333fb2018-03-13 16:15:53 +01003330 vmaDestroyPool(g_hAllocator, pool);
3331}
3332
Adam Sawickib0c36362018-11-13 16:17:38 +01003333static void TestResize()
3334{
3335 wprintf(L"Testing vmaResizeAllocation...\n");
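    // vmaResizeAllocation shrinks or grows an allocation in place. Growing only succeeds
    // when enough free space follows the allocation inside its block, and dedicated
    // allocations cannot be resized at all - the cases below exercise each of these rules.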
3336
3337 const VkDeviceSize KILOBYTE = 1024ull;
3338 const VkDeviceSize MEGABYTE = KILOBYTE * 1024;
3339
3340 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3341 bufCreateInfo.size = 2 * MEGABYTE;
3342 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3343
3344 VmaAllocationCreateInfo allocCreateInfo = {};
3345 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3346
3347 uint32_t memTypeIndex = UINT32_MAX;
3348 TEST( vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS );
3349
3350 VmaPoolCreateInfo poolCreateInfo = {};
3351 poolCreateInfo.flags = VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT;
3352 poolCreateInfo.blockSize = 8 * MEGABYTE;
3353 poolCreateInfo.minBlockCount = 1;
3354 poolCreateInfo.maxBlockCount = 1;
3355 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3356
3357 VmaPool pool;
3358 TEST( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) == VK_SUCCESS );
3359
3360 allocCreateInfo.pool = pool;
3361
3362 // Fill 8 MB pool with 4 * 2 MB allocations.
3363 VmaAllocation allocs[4] = {};
3364
3365 VkMemoryRequirements memReq = {};
3366 memReq.memoryTypeBits = UINT32_MAX;
3367 memReq.alignment = 4;
3368 memReq.size = bufCreateInfo.size;
3369
3370 VmaAllocationInfo allocInfo = {};
3371
3372 for(uint32_t i = 0; i < 4; ++i)
3373 {
3374 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &allocs[i], nullptr) == VK_SUCCESS );
3375 }
3376
3377 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 2MB
3378
3379 // Case: Resize to the same size always succeeds.
3380 {
3381 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS);
3382 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3383 TEST(allocInfo.size == 2ull * 1024 * 1024);
3384 }
3385
3386 // Case: Shrink allocation at the end.
3387 {
3388 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3389 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3390 TEST(allocInfo.size == 1ull * 1024 * 1024);
3391 }
3392
3393 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3394
3395 // Case: Shrink allocation before free space.
3396 {
3397 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 512 * KILOBYTE) == VK_SUCCESS );
3398 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3399 TEST(allocInfo.size == 512 * KILOBYTE);
3400 }
3401
3402 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3403
3404 // Case: Shrink allocation before next allocation.
3405 {
3406 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 1 * MEGABYTE) == VK_SUCCESS );
3407 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3408 TEST(allocInfo.size == 1 * MEGABYTE);
3409 }
3410
3411 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 0.5MB, free 1.5MB
3412
3413 // Case: Grow allocation while there is even more space available.
3414 {
3415 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 1 * MEGABYTE) == VK_SUCCESS );
3416 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3417 TEST(allocInfo.size == 1 * MEGABYTE);
3418 }
3419
3420 // Now it's: a0 1MB, free 1 MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3421
3422 // Case: Grow allocation while there is exact amount of free space available.
3423 {
3424 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 2 * MEGABYTE) == VK_SUCCESS );
3425 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3426 TEST(allocInfo.size == 2 * MEGABYTE);
3427 }
3428
3429 // Now it's: a0 2MB, a1 2MB, a2 2MB, a3 1MB, free 1MB
3430
3431 // Case: Fail to grow when there is not enough free space due to next allocation.
3432 {
3433 TEST( vmaResizeAllocation(g_hAllocator, allocs[0], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3434 vmaGetAllocationInfo(g_hAllocator, allocs[0], &allocInfo);
3435 TEST(allocInfo.size == 2 * MEGABYTE);
3436 }
3437
3438 // Case: Fail to grow when there is not enough free space due to end of memory block.
3439 {
3440 TEST( vmaResizeAllocation(g_hAllocator, allocs[3], 3 * MEGABYTE) == VK_ERROR_OUT_OF_POOL_MEMORY );
3441 vmaGetAllocationInfo(g_hAllocator, allocs[3], &allocInfo);
3442 TEST(allocInfo.size == 1 * MEGABYTE);
3443 }
3444
3445 for(uint32_t i = 4; i--; )
3446 {
3447 vmaFreeMemory(g_hAllocator, allocs[i]);
3448 }
3449
3450 vmaDestroyPool(g_hAllocator, pool);
3451
3452 // Test dedicated allocation
3453 {
3454 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
3455 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3456 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3457
3458 VmaAllocation dedicatedAlloc = VK_NULL_HANDLE;
3459 TEST( vmaAllocateMemory(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, &dedicatedAlloc, nullptr) == VK_SUCCESS );
3460
3461 // Case: Resize to the same size always succeeds.
3462 {
3463 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 2 * MEGABYTE) == VK_SUCCESS);
3464 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3465 TEST(allocInfo.size == 2ull * 1024 * 1024);
3466 }
3467
3468 // Case: Shrinking fails.
3469 {
3470 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 1 * MEGABYTE) < VK_SUCCESS);
3471 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3472 TEST(allocInfo.size == 2ull * 1024 * 1024);
3473 }
3474
3475 // Case: Growing fails.
3476 {
3477 TEST( vmaResizeAllocation(g_hAllocator, dedicatedAlloc, 3 * MEGABYTE) < VK_SUCCESS);
3478 vmaGetAllocationInfo(g_hAllocator, dedicatedAlloc, &allocInfo);
3479 TEST(allocInfo.size == 2ull * 1024 * 1024);
3480 }
3481
3482 vmaFreeMemory(g_hAllocator, dedicatedAlloc);
3483 }
3484}
3485
Adam Sawickie44c6262018-06-15 14:30:39 +02003486static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
3487{
3488 const uint8_t* pBytes = (const uint8_t*)pMemory;
3489 for(size_t i = 0; i < size; ++i)
3490 {
3491 if(pBytes[i] != pattern)
3492 {
3493 return false;
3494 }
3495 }
3496 return true;
3497}
3498
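// Verifies the VMA_DEBUG_INITIALIZE_ALLOCATIONS behavior: freshly allocated memory is
// expected to be filled with 0xDC and freed memory with 0xEF. This test is only
// meaningful when that feature is enabled in the VMA configuration used by this project.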
3499static void TestAllocationsInitialization()
3500{
3501 VkResult res;
3502
3503 const size_t BUF_SIZE = 1024;
3504
3505 // Create pool.
3506
3507 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3508 bufInfo.size = BUF_SIZE;
3509 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3510
3511 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
3512 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3513
3514 VmaPoolCreateInfo poolCreateInfo = {};
3515 poolCreateInfo.blockSize = BUF_SIZE * 10;
3516 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
3517 poolCreateInfo.maxBlockCount = 1;
3518 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003519 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003520
3521 VmaAllocationCreateInfo bufAllocCreateInfo = {};
3522 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003523 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003524
3525 // Create one persistently mapped buffer to keep memory of this block mapped,
3526 // so that the pointer to mapped data remains (more or less...) valid even
3527 // after destruction of other allocations.
3528
3529 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
3530 VkBuffer firstBuf;
3531 VmaAllocation firstAlloc;
3532 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003533 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003534
3535 // Test buffers.
3536
3537 for(uint32_t i = 0; i < 2; ++i)
3538 {
3539 const bool persistentlyMapped = i == 0;
3540 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
3541 VkBuffer buf;
3542 VmaAllocation alloc;
3543 VmaAllocationInfo allocInfo;
3544 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003545 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003546
3547 void* pMappedData;
3548 if(!persistentlyMapped)
3549 {
3550 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003551 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02003552 }
3553 else
3554 {
3555 pMappedData = allocInfo.pMappedData;
3556 }
3557
3558 // Validate initialized content
3559 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003560 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003561
3562 if(!persistentlyMapped)
3563 {
3564 vmaUnmapMemory(g_hAllocator, alloc);
3565 }
3566
3567 vmaDestroyBuffer(g_hAllocator, buf, alloc);
3568
3569 // Validate freed content
3570 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003571 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02003572 }
3573
3574 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
3575 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
3576}
3577
Adam Sawickib8333fb2018-03-13 16:15:53 +01003578static void TestPool_Benchmark(
3579 PoolTestResult& outResult,
3580 const PoolTestConfig& config)
3581{
Adam Sawickib8d34d52018-10-03 17:41:20 +02003582 TEST(config.ThreadCount > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003583
3584 RandomNumberGenerator mainRand{config.RandSeed};
3585
3586 uint32_t allocationSizeProbabilitySum = std::accumulate(
3587 config.AllocationSizes.begin(),
3588 config.AllocationSizes.end(),
3589 0u,
3590 [](uint32_t sum, const AllocationSize& allocSize) {
3591 return sum + allocSize.Probability;
3592 });
3593
3594 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3595 bufferInfo.size = 256; // Whatever.
3596 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3597
3598 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
3599 imageInfo.imageType = VK_IMAGE_TYPE_2D;
3600 imageInfo.extent.width = 256; // Whatever.
3601 imageInfo.extent.height = 256; // Whatever.
3602 imageInfo.extent.depth = 1;
3603 imageInfo.mipLevels = 1;
3604 imageInfo.arrayLayers = 1;
3605 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
3606 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
3607 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
3608 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
3609 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
3610
3611 uint32_t bufferMemoryTypeBits = UINT32_MAX;
3612 {
3613 VkBuffer dummyBuffer;
3614 VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, nullptr, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003615 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003616
3617 VkMemoryRequirements memReq;
3618 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3619 bufferMemoryTypeBits = memReq.memoryTypeBits;
3620
3621 vkDestroyBuffer(g_hDevice, dummyBuffer, nullptr);
3622 }
3623
3624 uint32_t imageMemoryTypeBits = UINT32_MAX;
3625 {
3626 VkImage dummyImage;
3627 VkResult res = vkCreateImage(g_hDevice, &imageInfo, nullptr, &dummyImage);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003628 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003629
3630 VkMemoryRequirements memReq;
3631 vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
3632 imageMemoryTypeBits = memReq.memoryTypeBits;
3633
3634 vkDestroyImage(g_hDevice, dummyImage, nullptr);
3635 }
3636
3637 uint32_t memoryTypeBits = 0;
3638 if(config.UsesBuffers() && config.UsesImages())
3639 {
3640 memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
3641 if(memoryTypeBits == 0)
3642 {
3643 PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
3644 return;
3645 }
3646 }
3647 else if(config.UsesBuffers())
3648 memoryTypeBits = bufferMemoryTypeBits;
3649 else if(config.UsesImages())
3650 memoryTypeBits = imageMemoryTypeBits;
3651 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02003652 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003653
3654 VmaPoolCreateInfo poolCreateInfo = {};
3655 poolCreateInfo.memoryTypeIndex = 0;
3656 poolCreateInfo.minBlockCount = 1;
3657 poolCreateInfo.maxBlockCount = 1;
3658 poolCreateInfo.blockSize = config.PoolSize;
3659 poolCreateInfo.frameInUseCount = 1;
3660
3661 VmaAllocationCreateInfo dummyAllocCreateInfo = {};
3662 dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3663 vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
3664
3665 VmaPool pool;
3666 VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003667 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003668
3669 // Start time measurement - after creating pool and initializing data structures.
3670 time_point timeBeg = std::chrono::high_resolution_clock::now();
3671
3672 ////////////////////////////////////////////////////////////////////////////////
3673 // ThreadProc
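    // Each worker thread owns TotalItemCount / ThreadCount items and simulates FrameCount
    // frames: per frame it shuffles items between the used and unused sets, touches or
    // (re)creates the used ones, and records timing and lost/failed statistics. Frames are
    // synchronized with the main thread through the frameStart/frameEnd Win32 events.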
3674 auto ThreadProc = [&](
3675 PoolTestThreadResult* outThreadResult,
3676 uint32_t randSeed,
3677 HANDLE frameStartEvent,
3678 HANDLE frameEndEvent) -> void
3679 {
3680 RandomNumberGenerator threadRand{randSeed};
3681
3682 outThreadResult->AllocationTimeMin = duration::max();
3683 outThreadResult->AllocationTimeSum = duration::zero();
3684 outThreadResult->AllocationTimeMax = duration::min();
3685 outThreadResult->DeallocationTimeMin = duration::max();
3686 outThreadResult->DeallocationTimeSum = duration::zero();
3687 outThreadResult->DeallocationTimeMax = duration::min();
3688 outThreadResult->AllocationCount = 0;
3689 outThreadResult->DeallocationCount = 0;
3690 outThreadResult->LostAllocationCount = 0;
3691 outThreadResult->LostAllocationTotalSize = 0;
3692 outThreadResult->FailedAllocationCount = 0;
3693 outThreadResult->FailedAllocationTotalSize = 0;
3694
3695 struct Item
3696 {
3697 VkDeviceSize BufferSize;
3698 VkExtent2D ImageSize;
3699 VkBuffer Buf;
3700 VkImage Image;
3701 VmaAllocation Alloc;
3702
3703 VkDeviceSize CalcSizeBytes() const
3704 {
3705 return BufferSize +
3706 ImageSize.width * ImageSize.height * 4;
3707 }
3708 };
3709 std::vector<Item> unusedItems, usedItems;
3710
3711 const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
3712
3713 // Create all items - all unused, not yet allocated.
3714 for(size_t i = 0; i < threadTotalItemCount; ++i)
3715 {
3716 Item item = {};
3717
3718 uint32_t allocSizeIndex = 0;
3719 uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
3720 while(r >= config.AllocationSizes[allocSizeIndex].Probability)
3721 r -= config.AllocationSizes[allocSizeIndex++].Probability;
3722
3723 const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
3724 if(allocSize.BufferSizeMax > 0)
3725 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003726 TEST(allocSize.BufferSizeMin > 0);
3727 TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003728 if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
3729 item.BufferSize = allocSize.BufferSizeMin;
3730 else
3731 {
3732 item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
3733 item.BufferSize = item.BufferSize / 16 * 16;
3734 }
3735 }
3736 else
3737 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003738 TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003739 if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
3740 item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
3741 else
3742 {
3743 item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3744 item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
3745 }
3746 }
3747
3748 unusedItems.push_back(item);
3749 }
3750
3751 auto Allocate = [&](Item& item) -> VkResult
3752 {
3753 VmaAllocationCreateInfo allocCreateInfo = {};
3754 allocCreateInfo.pool = pool;
3755 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
3756 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3757
3758 if(item.BufferSize)
3759 {
3760 bufferInfo.size = item.BufferSize;
3761 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3762 return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
3763 }
3764 else
3765 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003766 TEST(item.ImageSize.width && item.ImageSize.height);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003767
3768 imageInfo.extent.width = item.ImageSize.width;
3769 imageInfo.extent.height = item.ImageSize.height;
3770 PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3771 return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
3772 }
3773 };
3774
3775 ////////////////////////////////////////////////////////////////////////////////
3776 // Frames
3777 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3778 {
3779 WaitForSingleObject(frameStartEvent, INFINITE);
3780
3781 // Always make some percentage of the used buffers unused, so that different ones are chosen as used.
3782 const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
3783 for(size_t i = 0; i < bufsToMakeUnused; ++i)
3784 {
3785 size_t index = threadRand.Generate() % usedItems.size();
3786 unusedItems.push_back(usedItems[index]);
3787 usedItems.erase(usedItems.begin() + index);
3788 }
3789
3790 // Determine which bufs we want to use in this frame.
3791 const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
3792 / config.ThreadCount;
Adam Sawickib8d34d52018-10-03 17:41:20 +02003793 TEST(usedBufCount < usedItems.size() + unusedItems.size());
Adam Sawickib8333fb2018-03-13 16:15:53 +01003794 // Move some used to unused.
3795 while(usedBufCount < usedItems.size())
3796 {
3797 size_t index = threadRand.Generate() % usedItems.size();
3798 unusedItems.push_back(usedItems[index]);
3799 usedItems.erase(usedItems.begin() + index);
3800 }
3801 // Move some unused to used.
3802 while(usedBufCount > usedItems.size())
3803 {
3804 size_t index = threadRand.Generate() % unusedItems.size();
3805 usedItems.push_back(unusedItems[index]);
3806 unusedItems.erase(unusedItems.begin() + index);
3807 }
3808
3809 uint32_t touchExistingCount = 0;
3810 uint32_t touchLostCount = 0;
3811 uint32_t createSucceededCount = 0;
3812 uint32_t createFailedCount = 0;
3813
3814 // Touch all used buffers. If one is not yet created or has become lost, allocate it.
3815 for(size_t i = 0; i < usedItems.size(); ++i)
3816 {
3817 Item& item = usedItems[i];
3818 // Not yet created.
3819 if(item.Alloc == VK_NULL_HANDLE)
3820 {
3821 res = Allocate(item);
3822 ++outThreadResult->AllocationCount;
3823 if(res != VK_SUCCESS)
3824 {
3825 item.Alloc = VK_NULL_HANDLE;
3826 item.Buf = VK_NULL_HANDLE;
3827 ++outThreadResult->FailedAllocationCount;
3828 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3829 ++createFailedCount;
3830 }
3831 else
3832 ++createSucceededCount;
3833 }
3834 else
3835 {
3836 // Touch.
3837 VmaAllocationInfo allocInfo;
3838 vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
3839 // Lost.
3840 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3841 {
3842 ++touchLostCount;
3843
3844 // Destroy.
3845 {
3846 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3847 if(item.Buf)
3848 vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
3849 else
3850 vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
3851 ++outThreadResult->DeallocationCount;
3852 }
3853 item.Alloc = VK_NULL_HANDLE;
3854 item.Buf = VK_NULL_HANDLE;
3855
3856 ++outThreadResult->LostAllocationCount;
3857 outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
3858
3859 // Recreate.
3860 res = Allocate(item);
3861 ++outThreadResult->AllocationCount;
3862 // Creation failed.
3863 if(res != VK_SUCCESS)
3864 {
3865 ++outThreadResult->FailedAllocationCount;
3866 outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
3867 ++createFailedCount;
3868 }
3869 else
3870 ++createSucceededCount;
3871 }
3872 else
3873 ++touchExistingCount;
3874 }
3875 }
3876
3877 /*
3878 printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
3879 randSeed, frameIndex,
3880 touchExistingCount, touchLostCount,
3881 createSucceededCount, createFailedCount);
3882 */
3883
3884 SetEvent(frameEndEvent);
3885 }
3886
3887 // Free all remaining items.
3888 for(size_t i = usedItems.size(); i--; )
3889 {
3890 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3891 if(usedItems[i].Buf)
3892 vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
3893 else
3894 vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
3895 ++outThreadResult->DeallocationCount;
3896 }
3897 for(size_t i = unusedItems.size(); i--; )
3898 {
3899 PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
3900 if(unusedItems[i].Buf)
3901 vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
3902 else
3903 vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
3904 ++outThreadResult->DeallocationCount;
3905 }
3906 };
3907
3908 // Launch threads.
3909 uint32_t threadRandSeed = mainRand.Generate();
3910 std::vector<HANDLE> frameStartEvents{config.ThreadCount};
3911 std::vector<HANDLE> frameEndEvents{config.ThreadCount};
3912 std::vector<std::thread> bkgThreads;
3913 std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
3914 for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3915 {
3916 frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3917 frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
3918 bkgThreads.emplace_back(std::bind(
3919 ThreadProc,
3920 &threadResults[threadIndex],
3921 threadRandSeed + threadIndex,
3922 frameStartEvents[threadIndex],
3923 frameEndEvents[threadIndex]));
3924 }
3925
3926 // Execute frames.
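    // The main thread advances the allocator's frame index once per frame, releases all
    // workers for that frame, then waits until every worker signals completion.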
Adam Sawickib8d34d52018-10-03 17:41:20 +02003927 TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003928 for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
3929 {
3930 vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
3931 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3932 SetEvent(frameStartEvents[threadIndex]);
3933 WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
3934 }
3935
3936 // Wait for threads to finish.
3937 for(size_t i = 0; i < bkgThreads.size(); ++i)
3938 {
3939 bkgThreads[i].join();
3940 CloseHandle(frameEndEvents[i]);
3941 CloseHandle(frameStartEvents[i]);
3942 }
3943 bkgThreads.clear();
3944
3945 // Finish time measurement - before destroying pool.
3946 outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
3947
3948 vmaDestroyPool(g_hAllocator, pool);
3949
3950 outResult.AllocationTimeMin = duration::max();
3951 outResult.AllocationTimeAvg = duration::zero();
3952 outResult.AllocationTimeMax = duration::min();
3953 outResult.DeallocationTimeMin = duration::max();
3954 outResult.DeallocationTimeAvg = duration::zero();
3955 outResult.DeallocationTimeMax = duration::min();
3956 outResult.LostAllocationCount = 0;
3957 outResult.LostAllocationTotalSize = 0;
3958 outResult.FailedAllocationCount = 0;
3959 outResult.FailedAllocationTotalSize = 0;
3960 size_t allocationCount = 0;
3961 size_t deallocationCount = 0;
3962 for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
3963 {
3964 const PoolTestThreadResult& threadResult = threadResults[threadIndex];
3965 outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
3966 outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
3967 outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
3968 outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
3969 outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
3970 outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
3971 allocationCount += threadResult.AllocationCount;
3972 deallocationCount += threadResult.DeallocationCount;
3973 outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
3974 outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
3975 outResult.LostAllocationCount += threadResult.LostAllocationCount;
3976 outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
3977 }
3978 if(allocationCount)
3979 outResult.AllocationTimeAvg /= allocationCount;
3980 if(deallocationCount)
3981 outResult.DeallocationTimeAvg /= deallocationCount;
3982}
3983
3984static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
3985{
3986 if(ptr1 < ptr2)
3987 return ptr1 + size1 > ptr2;
3988 else if(ptr2 < ptr1)
3989 return ptr2 + size2 > ptr1;
3990 else
3991 return true;
3992}
3993
3994static void TestMapping()
3995{
3996 wprintf(L"Testing mapping...\n");
3997
3998 VkResult res;
3999 uint32_t memTypeIndex = UINT32_MAX;
4000
4001 enum TEST
4002 {
4003 TEST_NORMAL,
4004 TEST_POOL,
4005 TEST_DEDICATED,
4006 TEST_COUNT
4007 };
4008 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4009 {
4010 VmaPool pool = nullptr;
4011 if(testIndex == TEST_POOL)
4012 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004013 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004014 VmaPoolCreateInfo poolInfo = {};
4015 poolInfo.memoryTypeIndex = memTypeIndex;
4016 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004017 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004018 }
4019
4020 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4021 bufInfo.size = 0x10000;
4022 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4023
4024 VmaAllocationCreateInfo allocCreateInfo = {};
4025 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4026 allocCreateInfo.pool = pool;
4027 if(testIndex == TEST_DEDICATED)
4028 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4029
4030 VmaAllocationInfo allocInfo;
4031
4032 // Mapped manually
4033
4034 // Create 2 buffers.
4035 BufferInfo bufferInfos[3];
4036 for(size_t i = 0; i < 2; ++i)
4037 {
4038 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4039 &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004040 TEST(res == VK_SUCCESS);
4041 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004042 memTypeIndex = allocInfo.memoryType;
4043 }
4044
4045 // Map buffer 0.
4046 char* data00 = nullptr;
4047 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004048 TEST(res == VK_SUCCESS && data00 != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004049 data00[0xFFFF] = data00[0];
4050
4051 // Map buffer 0 a second time.
4052 char* data01 = nullptr;
4053 res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004054 TEST(res == VK_SUCCESS && data01 == data00);
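        // vmaMapMemory is reference-counted: mapping the same allocation again returns the
        // same pointer, and the allocation stays mapped until vmaUnmapMemory has been called
        // the same number of times (hence the double unmap below).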
Adam Sawickib8333fb2018-03-13 16:15:53 +01004055
4056 // Map buffer 1.
4057 char* data1 = nullptr;
4058 res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004059 TEST(res == VK_SUCCESS && data1 != nullptr);
4060 TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
Adam Sawickib8333fb2018-03-13 16:15:53 +01004061 data1[0xFFFF] = data1[0];
4062
4063 // Unmap buffer 0 twice.
4064 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4065 vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
4066 vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004067 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004068
4069 // Unmap buffer 1.
4070 vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
4071 vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004072 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004073
4074 // Create 3rd buffer - persistently mapped.
4075 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4076 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
4077 &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004078 TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004079
4080 // Map buffer 2.
4081 char* data2 = nullptr;
4082 res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004083 TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004084 data2[0xFFFF] = data2[0];
4085
4086 // Unmap buffer 2.
4087 vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
4088 vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004089 TEST(allocInfo.pMappedData == data2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004090
4091 // Destroy all buffers.
4092 for(size_t i = 3; i--; )
4093 vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
4094
4095 vmaDestroyPool(g_hAllocator, pool);
4096 }
4097}
4098
4099static void TestMappingMultithreaded()
4100{
4101 wprintf(L"Testing mapping multithreaded...\n");
4102
4103 static const uint32_t threadCount = 16;
4104 static const uint32_t bufferCount = 1024;
4105 static const uint32_t threadBufferCount = bufferCount / threadCount;
4106
4107 VkResult res;
4108 volatile uint32_t memTypeIndex = UINT32_MAX;
4109
4110 enum TEST
4111 {
4112 TEST_NORMAL,
4113 TEST_POOL,
4114 TEST_DEDICATED,
4115 TEST_COUNT
4116 };
4117 for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
4118 {
4119 VmaPool pool = nullptr;
4120 if(testIndex == TEST_POOL)
4121 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004122 TEST(memTypeIndex != UINT32_MAX);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004123 VmaPoolCreateInfo poolInfo = {};
4124 poolInfo.memoryTypeIndex = memTypeIndex;
4125 res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004126 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004127 }
4128
4129 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4130 bufCreateInfo.size = 0x10000;
4131 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4132
4133 VmaAllocationCreateInfo allocCreateInfo = {};
4134 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4135 allocCreateInfo.pool = pool;
4136 if(testIndex == TEST_DEDICATED)
4137 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4138
4139 std::thread threads[threadCount];
4140 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4141 {
4142 threads[threadIndex] = std::thread([=, &memTypeIndex](){
4143 // ======== THREAD FUNCTION ========
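                // Each thread creates threadBufferCount buffers, choosing one of the MODE
                // mapping patterns per buffer at random, and validates the reported
                // pMappedData state after every map/unmap transition.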
4144
4145 RandomNumberGenerator rand{threadIndex};
4146
4147 enum class MODE
4148 {
4149 // Don't map this buffer at all.
4150 DONT_MAP,
4151 // Map and quickly unmap.
4152 MAP_FOR_MOMENT,
4153 // Map and unmap before destruction.
4154 MAP_FOR_LONGER,
4155 // Map two times. Quickly unmap, second unmap before destruction.
4156 MAP_TWO_TIMES,
4157 // Create this buffer as persistently mapped.
4158 PERSISTENTLY_MAPPED,
4159 COUNT
4160 };
4161 std::vector<BufferInfo> bufInfos{threadBufferCount};
4162 std::vector<MODE> bufModes{threadBufferCount};
4163
4164 for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
4165 {
4166 BufferInfo& bufInfo = bufInfos[bufferIndex];
4167 const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
4168 bufModes[bufferIndex] = mode;
4169
4170 VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
4171 if(mode == MODE::PERSISTENTLY_MAPPED)
4172 localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
4173
4174 VmaAllocationInfo allocInfo;
4175 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
4176 &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004177 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004178
4179 if(memTypeIndex == UINT32_MAX)
4180 memTypeIndex = allocInfo.memoryType;
4181
4182 char* data = nullptr;
4183
4184 if(mode == MODE::PERSISTENTLY_MAPPED)
4185 {
4186 data = (char*)allocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02004187 TEST(data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004188 }
4189 else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
4190 mode == MODE::MAP_TWO_TIMES)
4191 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004192 TEST(data == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004193 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004194 TEST(res == VK_SUCCESS && data != nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004195
4196 if(mode == MODE::MAP_TWO_TIMES)
4197 {
4198 char* data2 = nullptr;
4199 res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004200 TEST(res == VK_SUCCESS && data2 == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004201 }
4202 }
4203 else if(mode == MODE::DONT_MAP)
4204 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02004205 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004206 }
4207 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004208 TEST(0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004209
4210 // Test that reading and writing from the beginning and end of mapped memory don't crash.
4211 if(data)
4212 data[0xFFFF] = data[0];
4213
4214 if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
4215 {
4216 vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
4217
4218 VmaAllocationInfo allocInfo;
4219 vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
4220 if(mode == MODE::MAP_FOR_MOMENT)
Adam Sawickib8d34d52018-10-03 17:41:20 +02004221 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004222 else
Adam Sawickib8d34d52018-10-03 17:41:20 +02004223 TEST(allocInfo.pMappedData == data);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004224 }
4225
4226 switch(rand.Generate() % 3)
4227 {
4228 case 0: Sleep(0); break; // Yield.
4229 case 1: Sleep(10); break; // 10 ms
4230 // default: No sleep.
4231 }
4232
4233 // Test that reading and writing from the beginning and end of mapped memory don't crash.
4234 if(data)
4235 data[0xFFFF] = data[0];
4236 }
4237
4238 for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
4239 {
4240 if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
4241 bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
4242 {
4243 vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
4244
4245 VmaAllocationInfo allocInfo;
4246 vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004247 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004248 }
4249
4250 vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
4251 }
4252 });
4253 }
4254
4255 for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
4256 threads[threadIndex].join();
4257
4258 vmaDestroyPool(g_hAllocator, pool);
4259 }
4260}
4261
4262static void WriteMainTestResultHeader(FILE* file)
4263{
4264 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02004265 "Code,Time,"
4266 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004267 "Total Time (us),"
4268 "Allocation Time Min (us),"
4269 "Allocation Time Avg (us),"
4270 "Allocation Time Max (us),"
4271 "Deallocation Time Min (us),"
4272 "Deallocation Time Avg (us),"
4273 "Deallocation Time Max (us),"
4274 "Total Memory Allocated (B),"
4275 "Free Range Size Avg (B),"
4276 "Free Range Size Max (B)\n");
4277}
4278
4279static void WriteMainTestResult(
4280 FILE* file,
4281 const char* codeDescription,
4282 const char* testDescription,
4283 const Config& config, const Result& result)
4284{
4285 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4286 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4287 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4288 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4289 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4290 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4291 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4292
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004293 std::string currTime;
4294 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004295
4296 fprintf(file,
4297 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01004298 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
4299 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004300 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02004301 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01004302 totalTimeSeconds * 1e6f,
4303 allocationTimeMinSeconds * 1e6f,
4304 allocationTimeAvgSeconds * 1e6f,
4305 allocationTimeMaxSeconds * 1e6f,
4306 deallocationTimeMinSeconds * 1e6f,
4307 deallocationTimeAvgSeconds * 1e6f,
4308 deallocationTimeMaxSeconds * 1e6f,
4309 result.TotalMemoryAllocated,
4310 result.FreeRangeSizeAvg,
4311 result.FreeRangeSizeMax);
4312}
4313
4314static void WritePoolTestResultHeader(FILE* file)
4315{
4316 fprintf(file,
4317 "Code,Test,Time,"
4318 "Config,"
4319 "Total Time (us),"
4320 "Allocation Time Min (us),"
4321 "Allocation Time Avg (us),"
4322 "Allocation Time Max (us),"
4323 "Deallocation Time Min (us),"
4324 "Deallocation Time Avg (us),"
4325 "Deallocation Time Max (us),"
4326 "Lost Allocation Count,"
4327 "Lost Allocation Total Size (B),"
4328 "Failed Allocation Count,"
4329 "Failed Allocation Total Size (B)\n");
4330}
4331
4332static void WritePoolTestResult(
4333 FILE* file,
4334 const char* codeDescription,
4335 const char* testDescription,
4336 const PoolTestConfig& config,
4337 const PoolTestResult& result)
4338{
4339 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
4340 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
4341 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
4342 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
4343 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
4344 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
4345 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
4346
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004347 std::string currTime;
4348 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004349
4350 fprintf(file,
4351 "%s,%s,%s,"
4352 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
4353 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
4354 // General
4355 codeDescription,
4356 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004357 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01004358 // Config
4359 config.ThreadCount,
4360 (unsigned long long)config.PoolSize,
4361 config.FrameCount,
4362 config.TotalItemCount,
4363 config.UsedItemCountMin,
4364 config.UsedItemCountMax,
4365 config.ItemsToMakeUnusedPercent,
4366 // Results
4367 totalTimeSeconds * 1e6f,
4368 allocationTimeMinSeconds * 1e6f,
4369 allocationTimeAvgSeconds * 1e6f,
4370 allocationTimeMaxSeconds * 1e6f,
4371 deallocationTimeMinSeconds * 1e6f,
4372 deallocationTimeAvgSeconds * 1e6f,
4373 deallocationTimeMaxSeconds * 1e6f,
4374 result.LostAllocationCount,
4375 result.LostAllocationTotalSize,
4376 result.FailedAllocationCount,
4377 result.FailedAllocationTotalSize);
4378}
4379
4380static void PerformCustomMainTest(FILE* file)
4381{
4382 Config config{};
4383 config.RandSeed = 65735476;
4384 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4385 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4386 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4387 config.FreeOrder = FREE_ORDER::FORWARD;
4388 config.ThreadCount = 16;
4389 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02004390 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004391
4392 // Buffers
4393 //config.AllocationSizes.push_back({4, 16, 1024});
4394 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4395
4396 // Images
4397 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4398 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4399
4400 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4401 config.AdditionalOperationCount = 1024;
4402
4403 Result result{};
4404 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004405 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004406 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
4407}
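// [Illustrative sketch, not the actual MainTest logic] AllocationSize::Probability is
// presumably a relative weight, e.g. the {4, 0x10000, 0xA00000} entry above would be picked
// 4x as often as an entry with weight 1. A hypothetical weighted pick over such a list:
static const AllocationSize& PickAllocationSizeByWeight(
    const std::vector<AllocationSize>& sizes, RandomNumberGenerator& rand)
{
    uint32_t weightSum = 0;
    for(const AllocationSize& allocSize : sizes)
        weightSum += allocSize.Probability;
    uint32_t r = rand.Generate() % weightSum;
    for(const AllocationSize& allocSize : sizes)
    {
        if(r < allocSize.Probability)
            return allocSize;
        r -= allocSize.Probability;
    }
    return sizes.back(); // Not reached when weightSum is consistent with the entries.
}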
4408
4409static void PerformCustomPoolTest(FILE* file)
4410{
4411 PoolTestConfig config;
4412 config.PoolSize = 100 * 1024 * 1024;
4413 config.RandSeed = 2345764;
4414 config.ThreadCount = 1;
4415 config.FrameCount = 200;
4416 config.ItemsToMakeUnusedPercent = 2;
4417
4418 AllocationSize allocSize = {};
4419 allocSize.BufferSizeMin = 1024;
4420 allocSize.BufferSizeMax = 1024 * 1024;
4421 allocSize.Probability = 1;
4422 config.AllocationSizes.push_back(allocSize);
4423
4424 allocSize.BufferSizeMin = 0;
4425 allocSize.BufferSizeMax = 0;
4426 allocSize.ImageSizeMin = 128;
4427 allocSize.ImageSizeMax = 1024;
4428 allocSize.Probability = 1;
4429 config.AllocationSizes.push_back(allocSize);
4430
4431 config.PoolSize = config.CalcAvgResourceSize() * 200;
4432 config.UsedItemCountMax = 160;
4433 config.TotalItemCount = config.UsedItemCountMax * 10;
4434 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4435
4436 g_MemoryAliasingWarningEnabled = false;
4437 PoolTestResult result = {};
4438 TestPool_Benchmark(result, config);
4439 g_MemoryAliasingWarningEnabled = true;
4440
4441 WritePoolTestResult(file, "Code desc", "Test desc", config, result);
4442}
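// [Illustrative sketch] PoolTestConfig::CalcAvgResourceSize() is used above to derive the
// pool size from the expected resource mix. A rough, hypothetical approximation of such a
// computation (not the actual member function) could average each entry's size range,
// treating an image of dimension N as about N*N*4 bytes, weighted by Probability:
static VkDeviceSize ApproxAvgResourceSize(const std::vector<AllocationSize>& sizes)
{
    VkDeviceSize weightedSum = 0;
    uint32_t weightSum = 0;
    for(const AllocationSize& allocSize : sizes)
    {
        VkDeviceSize avg;
        if(allocSize.BufferSizeMax > 0)
            avg = (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2;
        else
        {
            const VkDeviceSize avgDim = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
            avg = avgDim * avgDim * 4; // Assumes 4 bytes per pixel and no mip levels.
        }
        weightedSum += avg * allocSize.Probability;
        weightSum += allocSize.Probability;
    }
    return weightSum != 0 ? weightedSum / weightSum : 0;
}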
4443
Adam Sawickib8333fb2018-03-13 16:15:53 +01004444static void PerformMainTests(FILE* file)
4445{
4446 uint32_t repeatCount = 1;
4447 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4448
4449 Config config{};
4450 config.RandSeed = 65735476;
4451 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
4452 config.FreeOrder = FREE_ORDER::FORWARD;
4453
4454 size_t threadCountCount = 1;
4455 switch(ConfigType)
4456 {
4457 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4458 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4459 case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
4460 case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
4461 case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
4462 default: assert(0);
4463 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004464
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02004465 const size_t strategyCount = GetAllocationStrategyCount();
Adam Sawicki0667e332018-08-24 17:26:44 +02004466
Adam Sawickib8333fb2018-03-13 16:15:53 +01004467 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4468 {
4469 std::string desc1;
4470
4471 switch(threadCountIndex)
4472 {
4473 case 0:
4474 desc1 += "1_thread";
4475 config.ThreadCount = 1;
4476 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4477 break;
4478 case 1:
4479 desc1 += "16_threads+0%_common";
4480 config.ThreadCount = 16;
4481 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4482 break;
4483 case 2:
4484 desc1 += "16_threads+50%_common";
4485 config.ThreadCount = 16;
4486 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4487 break;
4488 case 3:
4489 desc1 += "16_threads+100%_common";
4490 config.ThreadCount = 16;
4491 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4492 break;
4493 case 4:
4494 desc1 += "2_threads+0%_common";
4495 config.ThreadCount = 2;
4496 config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
4497 break;
4498 case 5:
4499 desc1 += "2_threads+50%_common";
4500 config.ThreadCount = 2;
4501 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
4502 break;
4503 case 6:
4504 desc1 += "2_threads+100%_common";
4505 config.ThreadCount = 2;
4506 config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
4507 break;
4508 default:
4509 assert(0);
4510 }
4511
4512 // 0 = buffers, 1 = images, 2 = buffers and images
4513 size_t buffersVsImagesCount = 2;
4514 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4515 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4516 {
4517 std::string desc2 = desc1;
4518 switch(buffersVsImagesIndex)
4519 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004520 case 0: desc2 += ",Buffers"; break;
4521 case 1: desc2 += ",Images"; break;
4522 case 2: desc2 += ",Buffers+Images"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004523 default: assert(0);
4524 }
4525
4526 // 0 = small, 1 = large, 2 = small and large
4527 size_t smallVsLargeCount = 2;
4528 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4529 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4530 {
4531 std::string desc3 = desc2;
4532 switch(smallVsLargeIndex)
4533 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004534 case 0: desc3 += ",Small"; break;
4535 case 1: desc3 += ",Large"; break;
4536 case 2: desc3 += ",Small+Large"; break;
Adam Sawickib8333fb2018-03-13 16:15:53 +01004537 default: assert(0);
4538 }
4539
4540 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4541 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
4542 else
4543                config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
4544
4545 // 0 = varying sizes min...max, 1 = set of constant sizes
4546 size_t constantSizesCount = 1;
4547 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4548 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4549 {
4550 std::string desc4 = desc3;
4551 switch(constantSizesIndex)
4552 {
4553 case 0: desc4 += " Varying_sizes"; break;
4554 case 1: desc4 += " Constant_sizes"; break;
4555 default: assert(0);
4556 }
4557
4558 config.AllocationSizes.clear();
4559 // Buffers present
4560 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4561 {
4562 // Small
4563 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4564 {
4565 // Varying size
4566 if(constantSizesIndex == 0)
4567 config.AllocationSizes.push_back({4, 16, 1024});
4568 // Constant sizes
4569 else
4570 {
4571 config.AllocationSizes.push_back({1, 16, 16});
4572 config.AllocationSizes.push_back({1, 64, 64});
4573 config.AllocationSizes.push_back({1, 256, 256});
4574 config.AllocationSizes.push_back({1, 1024, 1024});
4575 }
4576 }
4577 // Large
4578 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4579 {
4580 // Varying size
4581 if(constantSizesIndex == 0)
4582 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4583 // Constant sizes
4584 else
4585 {
4586 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4587 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4588 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4589 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4590 }
4591 }
4592 }
4593 // Images present
4594 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4595 {
4596 // Small
4597 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4598 {
4599 // Varying size
4600 if(constantSizesIndex == 0)
4601 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4602 // Constant sizes
4603 else
4604 {
4605 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4606 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4607 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4608 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4609 }
4610 }
4611 // Large
4612 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4613 {
4614 // Varying size
4615 if(constantSizesIndex == 0)
4616 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4617 // Constant sizes
4618 else
4619 {
4620 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4621 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4622 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4623 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4624 }
4625 }
4626 }
4627
4628                // Initial allocation vs. additional operations: 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95%, each followed by many additional operations.
4629 size_t beginBytesToAllocateCount = 1;
4630 if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
4631 if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
4632 if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
4633 for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
4634 {
4635 std::string desc5 = desc4;
4636
4637 switch(beginBytesToAllocateIndex)
4638 {
4639 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004640 desc5 += ",Allocate_100%";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004641 config.BeginBytesToAllocate = config.MaxBytesToAllocate;
4642 config.AdditionalOperationCount = 0;
4643 break;
4644 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004645 desc5 += ",Allocate_50%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004646 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
4647 config.AdditionalOperationCount = 1024;
4648 break;
4649 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004650 desc5 += ",Allocate_5%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004651 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
4652 config.AdditionalOperationCount = 1024;
4653 break;
4654 case 3:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004655 desc5 += ",Allocate_95%+Operations";
Adam Sawickib8333fb2018-03-13 16:15:53 +01004656 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
4657 config.AdditionalOperationCount = 1024;
4658 break;
4659 default:
4660 assert(0);
4661 }
4662
Adam Sawicki0667e332018-08-24 17:26:44 +02004663 for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
Adam Sawickib8333fb2018-03-13 16:15:53 +01004664 {
Adam Sawicki0667e332018-08-24 17:26:44 +02004665 std::string desc6 = desc5;
4666 switch(strategyIndex)
4667 {
4668 case 0:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004669 desc6 += ",BestFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004670 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
4671 break;
4672 case 1:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004673 desc6 += ",WorstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004674 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
4675 break;
4676 case 2:
Adam Sawicki740b08f2018-08-27 13:42:07 +02004677 desc6 += ",FirstFit";
Adam Sawicki0667e332018-08-24 17:26:44 +02004678 config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
4679 break;
4680 default:
4681 assert(0);
4682 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004683
Adam Sawicki33d2ce72018-08-27 13:59:13 +02004684 desc6 += ',';
4685 desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
Adam Sawicki740b08f2018-08-27 13:42:07 +02004686
4687 const char* testDescription = desc6.c_str();
Adam Sawicki0667e332018-08-24 17:26:44 +02004688
4689 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4690 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004691 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawicki0667e332018-08-24 17:26:44 +02004692
4693 Result result{};
4694 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004695 TEST(res == VK_SUCCESS);
Adam Sawicki740b08f2018-08-27 13:42:07 +02004696 if(file)
4697 {
4698 WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4699 }
Adam Sawicki0667e332018-08-24 17:26:44 +02004700 }
Adam Sawickib8333fb2018-03-13 16:15:53 +01004701 }
4702 }
4703 }
4704 }
4705 }
4706 }
4707}
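// [Illustrative sketch] The strategyIndex switch inside PerformMainTests maps indices 0..2 to
// the VMA allocation strategy bits from vk_mem_alloc.h. The same mapping could be kept in a
// lookup table (hypothetical, not used by the tests):
static const VmaAllocationCreateFlags MAIN_TEST_ALLOCATION_STRATEGIES[] = {
    VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT,
    VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT,
    VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT,
};
// e.g.: config.AllocationStrategy = MAIN_TEST_ALLOCATION_STRATEGIES[strategyIndex];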
4708
4709static void PerformPoolTests(FILE* file)
4710{
4711 const size_t AVG_RESOURCES_PER_POOL = 300;
4712
4713 uint32_t repeatCount = 1;
4714 if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
4715
4716 PoolTestConfig config{};
4717 config.RandSeed = 2346343;
4718 config.FrameCount = 200;
4719 config.ItemsToMakeUnusedPercent = 2;
4720
4721 size_t threadCountCount = 1;
4722 switch(ConfigType)
4723 {
4724 case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
4725 case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
4726 case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
4727 case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
4728 case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
4729 default: assert(0);
4730 }
4731 for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
4732 {
4733 std::string desc1;
4734
4735 switch(threadCountIndex)
4736 {
4737 case 0:
4738 desc1 += "1_thread";
4739 config.ThreadCount = 1;
4740 break;
4741 case 1:
4742 desc1 += "16_threads";
4743 config.ThreadCount = 16;
4744 break;
4745 case 2:
4746 desc1 += "2_threads";
4747 config.ThreadCount = 2;
4748 break;
4749 default:
4750 assert(0);
4751 }
4752
4753 // 0 = buffers, 1 = images, 2 = buffers and images
4754 size_t buffersVsImagesCount = 2;
4755 if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
4756 for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
4757 {
4758 std::string desc2 = desc1;
4759 switch(buffersVsImagesIndex)
4760 {
4761 case 0: desc2 += " Buffers"; break;
4762 case 1: desc2 += " Images"; break;
4763 case 2: desc2 += " Buffers+Images"; break;
4764 default: assert(0);
4765 }
4766
4767 // 0 = small, 1 = large, 2 = small and large
4768 size_t smallVsLargeCount = 2;
4769 if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
4770 for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
4771 {
4772 std::string desc3 = desc2;
4773 switch(smallVsLargeIndex)
4774 {
4775 case 0: desc3 += " Small"; break;
4776 case 1: desc3 += " Large"; break;
4777 case 2: desc3 += " Small+Large"; break;
4778 default: assert(0);
4779 }
4780
4781 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4782 config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
4783 else
4784                config.PoolSize = 4ull * 1024 * 1024; // 4 MB
4785
4786 // 0 = varying sizes min...max, 1 = set of constant sizes
4787 size_t constantSizesCount = 1;
4788 if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
4789 for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
4790 {
4791 std::string desc4 = desc3;
4792 switch(constantSizesIndex)
4793 {
4794 case 0: desc4 += " Varying_sizes"; break;
4795 case 1: desc4 += " Constant_sizes"; break;
4796 default: assert(0);
4797 }
4798
4799 config.AllocationSizes.clear();
4800 // Buffers present
4801 if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
4802 {
4803 // Small
4804 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4805 {
4806 // Varying size
4807 if(constantSizesIndex == 0)
4808 config.AllocationSizes.push_back({4, 16, 1024});
4809 // Constant sizes
4810 else
4811 {
4812 config.AllocationSizes.push_back({1, 16, 16});
4813 config.AllocationSizes.push_back({1, 64, 64});
4814 config.AllocationSizes.push_back({1, 256, 256});
4815 config.AllocationSizes.push_back({1, 1024, 1024});
4816 }
4817 }
4818 // Large
4819 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4820 {
4821 // Varying size
4822 if(constantSizesIndex == 0)
4823 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
4824 // Constant sizes
4825 else
4826 {
4827 config.AllocationSizes.push_back({1, 0x10000, 0x10000});
4828 config.AllocationSizes.push_back({1, 0x80000, 0x80000});
4829 config.AllocationSizes.push_back({1, 0x200000, 0x200000});
4830 config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
4831 }
4832 }
4833 }
4834 // Images present
4835 if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
4836 {
4837 // Small
4838 if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
4839 {
4840 // Varying size
4841 if(constantSizesIndex == 0)
4842 config.AllocationSizes.push_back({4, 0, 0, 4, 32});
4843 // Constant sizes
4844 else
4845 {
4846 config.AllocationSizes.push_back({1, 0, 0, 4, 4});
4847 config.AllocationSizes.push_back({1, 0, 0, 8, 8});
4848 config.AllocationSizes.push_back({1, 0, 0, 16, 16});
4849 config.AllocationSizes.push_back({1, 0, 0, 32, 32});
4850 }
4851 }
4852 // Large
4853 if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
4854 {
4855 // Varying size
4856 if(constantSizesIndex == 0)
4857 config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
4858 // Constant sizes
4859 else
4860 {
4861 config.AllocationSizes.push_back({1, 0, 0, 256, 256});
4862 config.AllocationSizes.push_back({1, 0, 0, 512, 512});
4863 config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
4864 config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
4865 }
4866 }
4867 }
4868
4869 const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
4870 config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
4871
4872 // 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
4873                    size_t subscriptionModeCount = 2; // Initialized in case the assert in the default branch below is compiled out.
4874 switch(ConfigType)
4875 {
4876 case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
4877 case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
4878 case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
4879 case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
4880 case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
4881 default: assert(0);
4882 }
4883 for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
4884 {
4885 std::string desc5 = desc4;
4886
4887 switch(subscriptionModeIndex)
4888 {
4889 case 0:
4890 desc5 += " Subscription_66%";
4891 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
4892 break;
4893 case 1:
4894 desc5 += " Subscription_133%";
4895 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
4896 break;
4897 case 2:
4898 desc5 += " Subscription_100%";
4899 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
4900 break;
4901 case 3:
4902 desc5 += " Subscription_33%";
4903 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
4904 break;
4905 case 4:
4906 desc5 += " Subscription_166%";
4907 config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
4908 break;
4909 default:
4910 assert(0);
4911 }
4912
4913 config.TotalItemCount = config.UsedItemCountMax * 5;
4914 config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
4915
4916 const char* testDescription = desc5.c_str();
4917
4918 for(size_t repeat = 0; repeat < repeatCount; ++repeat)
4919 {
Adam Sawicki740b08f2018-08-27 13:42:07 +02004920 printf("%s #%u\n", testDescription, (uint32_t)repeat);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004921
4922 PoolTestResult result{};
4923 g_MemoryAliasingWarningEnabled = false;
4924 TestPool_Benchmark(result, config);
4925 g_MemoryAliasingWarningEnabled = true;
4926 WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
4927 }
4928 }
4929 }
4930 }
4931 }
4932 }
4933}
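// [Illustrative sketch] The "Subscription_N%" modes in PerformPoolTests size the working set
// relative to what the pool was built for: UsedItemCountMax = AVG_RESOURCES_PER_POOL * N / 100,
// so anything above 100% deliberately overcommits the pool and is expected to produce lost or
// failed allocations. Expressed as a hypothetical helper:
static size_t UsedItemCountForSubscription(size_t avgResourcesPerPool, size_t subscriptionPercent)
{
    return avgResourcesPerPool * subscriptionPercent / 100;
}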
4934
Adam Sawickia83793a2018-09-03 13:40:42 +02004935static void BasicTestBuddyAllocator()
4936{
4937 wprintf(L"Basic test buddy allocator\n");
4938
4939 RandomNumberGenerator rand{76543};
4940
4941 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4942    sampleBufCreateInfo.size = 1024; // Arbitrary - only the memory type derived from this buffer matters here.
4943 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4944
4945 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
4946 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
4947
4948 VmaPoolCreateInfo poolCreateInfo = {};
4949 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004950 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004951
Adam Sawickid6e6d6b2018-09-21 14:07:02 +02004952 // Deliberately adding 1023 to test usable size smaller than memory block size.
4953 poolCreateInfo.blockSize = 1024 * 1024 + 1023;
Adam Sawickia83793a2018-09-03 13:40:42 +02004954 poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
Adam Sawicki80927152018-09-07 17:27:23 +02004955 //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
Adam Sawickia83793a2018-09-03 13:40:42 +02004956
4957 VmaPool pool = nullptr;
4958 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004959 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004960
4961 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
4962
4963 VmaAllocationCreateInfo allocCreateInfo = {};
4964 allocCreateInfo.pool = pool;
4965
4966 std::vector<BufferInfo> bufInfo;
4967 BufferInfo newBufInfo;
4968 VmaAllocationInfo allocInfo;
4969
4970 bufCreateInfo.size = 1024 * 256;
4971 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4972 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004973 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004974 bufInfo.push_back(newBufInfo);
4975
4976 bufCreateInfo.size = 1024 * 512;
4977 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4978 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004979 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004980 bufInfo.push_back(newBufInfo);
4981
4982 bufCreateInfo.size = 1024 * 128;
4983 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4984 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004985 TEST(res == VK_SUCCESS);
Adam Sawickia83793a2018-09-03 13:40:42 +02004986 bufInfo.push_back(newBufInfo);
Adam Sawickia01d4582018-09-21 14:22:35 +02004987
4988 // Test very small allocation, smaller than minimum node size.
4989 bufCreateInfo.size = 1;
4990 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
4991 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004992 TEST(res == VK_SUCCESS);
Adam Sawickia01d4582018-09-21 14:22:35 +02004993 bufInfo.push_back(newBufInfo);
Adam Sawickia83793a2018-09-03 13:40:42 +02004994
Adam Sawicki9933c5c2018-09-21 14:57:24 +02004995 // Test some small allocation with alignment requirement.
4996 {
4997 VkMemoryRequirements memReq;
4998 memReq.alignment = 256;
4999 memReq.memoryTypeBits = UINT32_MAX;
5000 memReq.size = 32;
5001
5002 newBufInfo.Buffer = VK_NULL_HANDLE;
5003 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
5004 &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005005 TEST(res == VK_SUCCESS);
5006 TEST(allocInfo.offset % memReq.alignment == 0);
Adam Sawicki9933c5c2018-09-21 14:57:24 +02005007 bufInfo.push_back(newBufInfo);
5008 }
5009
5010 //SaveAllocatorStatsToFile(L"TEST.json");
5011
Adam Sawicki21017c62018-09-07 15:26:59 +02005012 VmaPoolStats stats = {};
5013 vmaGetPoolStats(g_hAllocator, pool, &stats);
5014 int DBG = 0; // Set breakpoint here to inspect `stats`.
5015
Adam Sawicki80927152018-09-07 17:27:23 +02005016    // Allocate enough new buffers that some of them are sure to end up in a second memory block.
5017 for(uint32_t i = 0; i < 32; ++i)
5018 {
5019 bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
5020 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
5021 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005022 TEST(res == VK_SUCCESS);
Adam Sawicki80927152018-09-07 17:27:23 +02005023 bufInfo.push_back(newBufInfo);
5024 }
5025
5026 SaveAllocatorStatsToFile(L"BuddyTest01.json");
5027
Adam Sawickia83793a2018-09-03 13:40:42 +02005028 // Destroy the buffers in random order.
5029 while(!bufInfo.empty())
5030 {
5031 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
5032 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
5033 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
5034 bufInfo.erase(bufInfo.begin() + indexToDestroy);
5035 }
5036
5037 vmaDestroyPool(g_hAllocator, pool);
5038}
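// [Illustrative sketch] A buddy allocator serves every request from a node whose size is a
// power of two, so the 256 KB and 512 KB buffers above fit their nodes exactly, while odd
// sizes get rounded up (a 300 KB request would occupy a 512 KB node). A minimal
// next-power-of-two helper (hypothetical, not taken from the library):
static VkDeviceSize NextPowerOfTwo(VkDeviceSize v)
{
    VkDeviceSize result = 1;
    while(result < v)
        result <<= 1;
    return result;
}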
5039
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005040static void BasicTestAllocatePages()
5041{
5042 wprintf(L"Basic test allocate pages\n");
5043
5044 RandomNumberGenerator rand{765461};
5045
5046 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5047    sampleBufCreateInfo.size = 1024; // Arbitrary - only the memory type derived from this buffer matters here.
5048 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
5049
5050 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
5051 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5052
5053 VmaPoolCreateInfo poolCreateInfo = {};
5054 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickia7d77692018-10-03 16:15:27 +02005055 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005056
5057 // 1 block of 1 MB.
5058 poolCreateInfo.blockSize = 1024 * 1024;
5059 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
5060
5061 // Create pool.
5062 VmaPool pool = nullptr;
5063 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickia7d77692018-10-03 16:15:27 +02005064 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005065
5066 // Make 100 allocations of 4 KB - they should fit into the pool.
5067 VkMemoryRequirements memReq;
5068 memReq.memoryTypeBits = UINT32_MAX;
5069 memReq.alignment = 4 * 1024;
5070 memReq.size = 4 * 1024;
5071
5072 VmaAllocationCreateInfo allocCreateInfo = {};
5073 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5074 allocCreateInfo.pool = pool;
5075
5076 constexpr uint32_t allocCount = 100;
5077
5078 std::vector<VmaAllocation> alloc{allocCount};
5079 std::vector<VmaAllocationInfo> allocInfo{allocCount};
5080 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005081 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005082 for(uint32_t i = 0; i < allocCount; ++i)
5083 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005084 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005085 allocInfo[i].pMappedData != nullptr &&
5086 allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
5087 allocInfo[i].memoryType == allocInfo[0].memoryType);
5088 }
5089
5090 // Free the allocations.
5091 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5092 std::fill(alloc.begin(), alloc.end(), nullptr);
5093 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5094
5095 // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
5096 // Also test optional allocationInfo = null.
5097 memReq.size = 100 * 1024;
5098 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
Adam Sawickia7d77692018-10-03 16:15:27 +02005099 TEST(res != VK_SUCCESS);
5100    TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation a){ return a != VK_NULL_HANDLE; }) == alloc.end());
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005101
5102 // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
5103 memReq.size = 4 * 1024;
5104 memReq.alignment = 128 * 1024;
5105 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005106 TEST(res != VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005107
5108 // Make 100 dedicated allocations of 4 KB.
5109 memReq.alignment = 4 * 1024;
5110 memReq.size = 4 * 1024;
5111
5112 VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
5113 dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
5114 dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5115 res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
Adam Sawickia7d77692018-10-03 16:15:27 +02005116 TEST(res == VK_SUCCESS);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005117 for(uint32_t i = 0; i < allocCount; ++i)
5118 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005119 TEST(alloc[i] != VK_NULL_HANDLE &&
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005120 allocInfo[i].pMappedData != nullptr &&
5121 allocInfo[i].memoryType == allocInfo[0].memoryType &&
5122 allocInfo[i].offset == 0);
5123 if(i > 0)
5124 {
Adam Sawickia7d77692018-10-03 16:15:27 +02005125 TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005126 }
5127 }
5128
5129 // Free the allocations.
5130 vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
5131 std::fill(alloc.begin(), alloc.end(), nullptr);
5132 std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
5133
5134 vmaDestroyPool(g_hAllocator, pool);
5135}
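// [Illustrative sketch, not called by the tests] vmaAllocateMemoryPages() used above behaves
// roughly like calling vmaAllocateMemory() in a loop, except that on failure it frees whatever
// it already allocated and nulls out the array. A hypothetical manual version of that pattern:
static VkResult AllocatePagesManually(
    const VkMemoryRequirements& memReq,
    const VmaAllocationCreateInfo& allocCreateInfo,
    uint32_t allocCount,
    VmaAllocation* outAllocs)
{
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        VkResult res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &outAllocs[i], nullptr);
        if(res != VK_SUCCESS)
        {
            // Roll back everything allocated so far.
            vmaFreeMemoryPages(g_hAllocator, i, outAllocs);
            for(uint32_t j = 0; j < allocCount; ++j)
                outAllocs[j] = VK_NULL_HANDLE;
            return res;
        }
    }
    return VK_SUCCESS;
}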
5136
Adam Sawickif2975342018-10-16 13:49:02 +02005137// Test the testing environment.
5138static void TestGpuData()
5139{
5140 RandomNumberGenerator rand = { 53434 };
5141
5142 std::vector<AllocInfo> allocInfo;
5143
5144 for(size_t i = 0; i < 100; ++i)
5145 {
5146 AllocInfo info = {};
5147
5148 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
5149 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
5150 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
5151 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5152 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
5153
5154 VmaAllocationCreateInfo allocCreateInfo = {};
5155 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5156
5157 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
5158 TEST(res == VK_SUCCESS);
5159
5160 info.m_StartValue = rand.Generate();
5161
5162 allocInfo.push_back(std::move(info));
5163 }
5164
5165 UploadGpuData(allocInfo.data(), allocInfo.size());
5166
5167 ValidateGpuData(allocInfo.data(), allocInfo.size());
5168
5169 DestroyAllAllocations(allocInfo);
5170}
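// [Illustrative sketch] DestroyAllAllocations() above is assumed to release every buffer
// together with its allocation. Done by hand just for buffers like the ones created in
// TestGpuData, the cleanup would look roughly like this (hypothetical helper):
static void DestroyAllocInfoBuffers(std::vector<AllocInfo>& allocInfo)
{
    for(AllocInfo& info : allocInfo)
        vmaDestroyBuffer(g_hAllocator, info.m_Buffer, info.m_Allocation);
    allocInfo.clear();
}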
5171
Adam Sawickib8333fb2018-03-13 16:15:53 +01005172void Test()
5173{
5174 wprintf(L"TESTING:\n");
5175
Adam Sawicki5c8af7b2018-12-10 13:34:54 +01005176 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02005177 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01005178 ////////////////////////////////////////////////////////////////////////////////
5179 // Temporarily insert custom tests here:
Adam Sawicki80927152018-09-07 17:27:23 +02005180
Adam Sawicki70a683e2018-08-24 15:36:32 +02005181 return;
5182 }
5183
Adam Sawickib8333fb2018-03-13 16:15:53 +01005184 // # Simple tests
5185
5186 TestBasics();
Adam Sawickif2975342018-10-16 13:49:02 +02005187 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02005188#if VMA_DEBUG_MARGIN
5189 TestDebugMargin();
5190#else
5191 TestPool_SameSize();
5192 TestHeapSizeLimit();
Adam Sawickib0c36362018-11-13 16:17:38 +01005193 TestResize();
Adam Sawicki212a4a62018-06-14 15:44:45 +02005194#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02005195#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
5196 TestAllocationsInitialization();
5197#endif
Adam Sawickib8333fb2018-03-13 16:15:53 +01005198 TestMapping();
5199 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02005200 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02005201 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02005202 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005203
Adam Sawicki4338f662018-09-07 14:12:37 +02005204 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02005205 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02005206
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005207 {
5208 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02005209 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005210 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02005211 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005212 fclose(file);
5213 }
5214
Adam Sawickib8333fb2018-03-13 16:15:53 +01005215 TestDefragmentationSimple();
5216 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01005217 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01005218 TestDefragmentationGpu();
Adam Sawickib8333fb2018-03-13 16:15:53 +01005219
5220 // # Detailed tests
5221 FILE* file;
5222 fopen_s(&file, "Results.csv", "w");
5223 assert(file != NULL);
5224
5225 WriteMainTestResultHeader(file);
5226 PerformMainTests(file);
5227 //PerformCustomMainTest(file);
5228
5229 WritePoolTestResultHeader(file);
5230 PerformPoolTests(file);
5231 //PerformCustomPoolTest(file);
5232
5233 fclose(file);
5234
5235 wprintf(L"Done.\n");
5236}
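// [Illustrative sketch, not used above] The CSV files in Test() are opened with fopen_s and
// closed manually. A small RAII wrapper (hypothetical) would guarantee fclose() even if one
// of the benchmarks aborted early:
class ScopedCsvFile
{
public:
    ScopedCsvFile(const char* path) { fopen_s(&m_File, path, "w"); }
    ~ScopedCsvFile() { if(m_File != nullptr) fclose(m_File); }
    FILE* Get() const { return m_File; }
private:
    FILE* m_File = nullptr;
    ScopedCsvFile(const ScopedCsvFile&) = delete;
    ScopedCsvFile& operator=(const ScopedCsvFile&) = delete;
};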
5237
Adam Sawickif1a793c2018-03-13 15:42:22 +01005238#endif // #ifdef _WIN32