blob: e5ec4c1ba8dc0891ca1e0b16875625c9e53d40cc [file] [log] [blame]
Adam Sawickiae5c4662019-01-02 10:23:35 +01001//
Adam Sawicki50882502020-02-07 16:51:31 +01002// Copyright (c) 2017-2020 Advanced Micro Devices, Inc. All rights reserved.
Adam Sawickiae5c4662019-01-02 10:23:35 +01003//
4// Permission is hereby granted, free of charge, to any person obtaining a copy
5// of this software and associated documentation files (the "Software"), to deal
6// in the Software without restriction, including without limitation the rights
7// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8// copies of the Software, and to permit persons to whom the Software is
9// furnished to do so, subject to the following conditions:
10//
11// The above copyright notice and this permission notice shall be included in
12// all copies or substantial portions of the Software.
13//
14// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
20// THE SOFTWARE.
21//
22
Adam Sawickif1a793c2018-03-13 15:42:22 +010023#include "Tests.h"
24#include "VmaUsage.h"
25#include "Common.h"
Adam Sawickib8333fb2018-03-13 16:15:53 +010026#include <atomic>
27#include <thread>
28#include <mutex>
Adam Sawicki94ce3d72019-04-17 14:59:25 +020029#include <functional>
Adam Sawickif1a793c2018-03-13 15:42:22 +010030
#ifdef _WIN32

// Label used when annotating test objects with a human-readable description.
// NOTE(review): the value "Foo" looks like a placeholder — confirm intent.
static const char* CODE_DESCRIPTION = "Foo";

// Shared state and helpers defined in another translation unit.
extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
extern bool g_BufferDeviceAddressEnabled;
extern bool VK_EXT_memory_priority_enabled;
extern PFN_vkGetBufferDeviceAddressEXT g_vkGetBufferDeviceAddressEXT;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

// Mirrors the library's VMA_DEBUG_MARGIN build setting; 0 when not overridden.
#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif
// Test thoroughness presets: higher levels make the tests sweep over more
// variants (e.g. more allocation strategies — see GetAllocationStrategyCount).
enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT // Number of presets; not a valid preset itself.
};
55
// Currently selected thoroughness level; switch to the commented-out line
// below for longer, more exhaustive test runs.
static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;
Adam Sawicki0a607132018-08-24 11:18:41 +020058
// Order in which the stress tests free their remaining allocations at teardown.
enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };
60
// Display names indexed by FREE_ORDER (COUNT excluded).
static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};
66
Adam Sawicki80927152018-09-07 17:27:23 +020067// Copy of internal VmaAlgorithmToStr.
68static const char* AlgorithmToStr(uint32_t algorithm)
69{
70 switch(algorithm)
71 {
72 case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
73 return "Linear";
74 case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
75 return "Buddy";
76 case 0:
77 return "Default";
78 default:
79 assert(0);
80 return "";
81 }
82}
83
// One probability-weighted bucket of resource sizes used by the stress tests.
// A bucket describes either buffers (BufferSizeMax > 0) or square images
// (ImageSizeMax > 0) — never both.
struct AllocationSize
{
    uint32_t Probability; // Relative weight of this bucket among all buckets.
    VkDeviceSize BufferSizeMin, BufferSizeMax; // Buffer size range, in bytes.
    uint32_t ImageSizeMin, ImageSizeMax; // Image dimension range, in pixels per side.
};
90
// Parameters driving one run of MainTest().
struct Config
{
    uint32_t RandSeed; // Seed for the deterministic PRNG.
    VkDeviceSize BeginBytesToAllocate; // Bytes each run allocates up-front (split across threads).
    uint32_t AdditionalOperationCount; // Random alloc/free operations after the initial fill.
    VkDeviceSize MaxBytesToAllocate;   // Upper bound on live bytes during the random phase.
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes; // Probability-weighted size buckets.
    uint32_t ThreadCount; // Number of worker threads; must be > 0.
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent; // Chance an allocation goes to the shared (cross-thread) list.
    FREE_ORDER FreeOrder; // Teardown free order.
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
104
// Aggregated timing and memory statistics produced by MainTest().
// The *Avg fields are used as running sums while the test executes and are
// divided by the allocation count at the end of MainTest().
struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated; // used + unused bytes reported by vmaCalculateStats.
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};
113
// Forward declarations of the defragmentation tests.
void TestDefragmentationSimple();
void TestDefragmentationFull();
116
117struct PoolTestConfig
118{
119 uint32_t RandSeed;
120 uint32_t ThreadCount;
121 VkDeviceSize PoolSize;
122 uint32_t FrameCount;
123 uint32_t TotalItemCount;
124 // Range for number of items used in each frame.
125 uint32_t UsedItemCountMin, UsedItemCountMax;
126 // Percent of items to make unused, and possibly make some others used in each frame.
127 uint32_t ItemsToMakeUnusedPercent;
128 std::vector<AllocationSize> AllocationSizes;
129
130 VkDeviceSize CalcAvgResourceSize() const
131 {
132 uint32_t probabilitySum = 0;
133 VkDeviceSize sizeSum = 0;
134 for(size_t i = 0; i < AllocationSizes.size(); ++i)
135 {
136 const AllocationSize& allocSize = AllocationSizes[i];
137 if(allocSize.BufferSizeMax > 0)
138 sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
139 else
140 {
141 const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
142 sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
143 }
144 probabilitySum += allocSize.Probability;
145 }
146 return sizeSum / probabilitySum;
147 }
148
149 bool UsesBuffers() const
150 {
151 for(size_t i = 0; i < AllocationSizes.size(); ++i)
152 if(AllocationSizes[i].BufferSizeMax > 0)
153 return true;
154 return false;
155 }
156
157 bool UsesImages() const
158 {
159 for(size_t i = 0; i < AllocationSizes.size(); ++i)
160 if(AllocationSizes[i].ImageSizeMax > 0)
161 return true;
162 return false;
163 }
164};
165
// Aggregated statistics produced by the custom-pool stress test.
struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;     // Allocations lost (can-become-lost feature).
    size_t FailedAllocationCount, FailedAllocationTotalSize; // Allocations that failed outright.
};
174
// Assumed cost in bytes per image pixel when budgeting test allocations.
static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

// Global frame counter. NOTE(review): usage is outside this chunk — presumably
// advanced by tests exercising frame-based (lost-allocation) features; confirm.
uint32_t g_FrameIndex = 0;
Adam Sawicki8cfe05f2018-08-22 16:48:17 +0200178
// A buffer together with the VMA allocation that backs it.
struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};
184
Adam Sawicki40ffe982019-10-11 15:56:02 +0200185static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
186{
187 const VkPhysicalDeviceMemoryProperties* props;
188 vmaGetMemoryProperties(g_hAllocator, &props);
189 return props->memoryTypes[memoryTypeIndex].heapIndex;
190}
191
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +0200192static uint32_t GetAllocationStrategyCount()
193{
194 uint32_t strategyCount = 0;
195 switch(ConfigType)
196 {
197 case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
198 case CONFIG_TYPE_SMALL: strategyCount = 1; break;
199 case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
200 case CONFIG_TYPE_LARGE: strategyCount = 2; break;
201 case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
202 default: assert(0);
203 }
204 return strategyCount;
205}
206
207static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
208{
209 switch(allocStrategy)
210 {
211 case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
212 case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
213 case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
214 case 0: return "Default"; break;
215 default: assert(0); return "";
216 }
217}
218
Adam Sawickib8333fb2018-03-13 16:15:53 +0100219static void InitResult(Result& outResult)
220{
221 outResult.TotalTime = duration::zero();
222 outResult.AllocationTimeMin = duration::max();
223 outResult.AllocationTimeAvg = duration::zero();
224 outResult.AllocationTimeMax = duration::min();
225 outResult.DeallocationTimeMin = duration::max();
226 outResult.DeallocationTimeAvg = duration::zero();
227 outResult.DeallocationTimeMax = duration::min();
228 outResult.TotalMemoryAllocated = 0;
229 outResult.FreeRangeSizeAvg = 0;
230 outResult.FreeRangeSizeMax = 0;
231}
232
// RAII stopwatch: measures the wall-clock time between construction and
// destruction and folds it into caller-owned min / sum / max accumulators.
class TimeRegisterObj
{
public:
    // The referenced accumulators must outlive this object.
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    // Non-owning references to the caller's accumulators.
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg; // Timestamp captured at construction.
};
258
// Per-thread statistics gathered by the pool test; merged into PoolTestResult
// after all threads finish.
struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};
267
// Scoped timer accumulating into the allocation-time fields of a Result.
// (AllocationTimeAvg serves as the running sum; MainTest divides it by the
// allocation count at the end.)
class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};
276
// Scoped timer accumulating into the deallocation-time fields of a Result.
// (DeallocationTimeAvg serves as the running sum until MainTest normalizes it.)
class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};
285
// Scoped timer accumulating into the allocation-time fields of a per-thread
// pool test result.
class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};
294
// Scoped timer accumulating into the deallocation-time fields of a per-thread
// pool test result.
class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};
303
Adam Sawicki33d2ce72018-08-27 13:59:13 +0200304static void CurrentTimeToStr(std::string& out)
305{
306 time_t rawTime; time(&rawTime);
307 struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
308 char timeStr[128];
309 strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
310 out = timeStr;
311}
312
// Stress test entry point: spawns config.ThreadCount worker threads that
// create and destroy buffers and images through the global VMA allocator
// according to `config`, gathers timing and memory statistics into
// `outResult`, and returns the last VkResult produced by an allocation
// (VK_SUCCESS when everything succeeded).
//
// Phases: (1) each thread allocates up to its share of BeginBytesToAllocate;
// (2) each thread performs random additional allocs/frees bounded by
// MaxBytesToAllocate; (3) main thread snapshots allocator statistics;
// (4) a Win32 event releases all threads to free their remaining
// allocations in config.FreeOrder; (5) leftover shared allocations are
// freed on the main thread.
//
// NOTE(review): `res`, `mainRand` (FREE_ORDER::RANDOM path inside ThreadProc)
// and the timing fields of `outResult` (via the TimeRegisterObj destructors)
// are written from multiple worker threads without synchronization — confirm
// whether this data race is acceptable for test code.
VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    // One live resource: exactly one of Buffer/Image is non-null.
    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    // Allocations shared between threads, guarded by commonAllocationsMutex.
    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    // Creates one buffer or image (exactly one of bufferSize / imageExtent is
    // nonzero), picks a memory usage class by weighted random choice, and
    // stores the result in either the shared list or the caller's list.
    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        // Weighted random pick of VMA_MEMORY_USAGE_* index.
        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            // GPU-only images can be optimally tiled; host-visible ones must be linear.
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            // Usage flags chosen to be plausible for the memory usage class.
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    // Picks the next allocation request by weighted random choice among the
    // configured size buckets: either a buffer size (rounded down to a
    // multiple of 16) or a random image extent.
    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    // Manual-reset event; signaled by the main thread once statistics are taken.
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        // Each thread gets an equal share of the global byte/operation budgets.
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                // Free a random allocation, either from the shared list or this thread's own.
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        // Only free if this thread's byte counter can absorb it
                        // (the shared allocation may have been created by another thread).
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        // Hold all allocations until the main thread has taken its statistics snapshot.
        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                // NOTE(review): mainRand is shared by all threads here without a lock.
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait for threads reached max allocations
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    // The *Avg fields were used as running sums; normalize them now.
    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
679
Adam Sawicki51fa9662018-10-03 13:44:29 +0200680void SaveAllocatorStatsToFile(const wchar_t* filePath)
Adam Sawickib8333fb2018-03-13 16:15:53 +0100681{
Adam Sawicki4d844e22019-01-24 16:21:05 +0100682 wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100683 char* stats;
Adam Sawickie44c6262018-06-15 14:30:39 +0200684 vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100685 SaveFile(filePath, stats, strlen(stats));
Adam Sawickie44c6262018-06-15 14:30:39 +0200686 vmaFreeStatsString(g_hAllocator, stats);
Adam Sawickib8333fb2018-03-13 16:15:53 +0100687}
688
// A test resource: exactly one of m_Buffer / m_Image is valid, together with
// its VMA allocation, its creation parameters (kept so the resource can be
// recreated and its contents validated), and the first value of the
// deterministic data pattern written into it.
struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    VkImageLayout m_ImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    uint32_t m_StartValue = 0; // First uint32 of the fill pattern.
    // Creation parameters: only the member matching the resource kind is active.
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // After defragmentation.
    VkBuffer m_NewBuffer = VK_NULL_HANDLE;
    VkImage m_NewImage = VK_NULL_HANDLE;

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void CreateImage(
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VkImageLayout layout);
    void Destroy();
};
715
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200716void AllocInfo::CreateBuffer(
717 const VkBufferCreateInfo& bufCreateInfo,
718 const VmaAllocationCreateInfo& allocCreateInfo)
719{
720 m_BufferInfo = bufCreateInfo;
721 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
722 TEST(res == VK_SUCCESS);
723}
Adam Sawickia52012d2019-12-23 15:28:51 +0100724void AllocInfo::CreateImage(
725 const VkImageCreateInfo& imageCreateInfo,
726 const VmaAllocationCreateInfo& allocCreateInfo,
727 VkImageLayout layout)
728{
729 m_ImageInfo = imageCreateInfo;
730 m_ImageLayout = layout;
731 VkResult res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &m_Image, &m_Allocation, nullptr);
732 TEST(res == VK_SUCCESS);
733}
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200734
735void AllocInfo::Destroy()
736{
737 if(m_Image)
738 {
Adam Sawickic467e282019-12-23 16:38:31 +0100739 assert(!m_Buffer);
Adam Sawicki1f84f622019-07-02 13:40:01 +0200740 vkDestroyImage(g_hDevice, m_Image, g_Allocs);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100741 m_Image = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200742 }
743 if(m_Buffer)
744 {
Adam Sawickic467e282019-12-23 16:38:31 +0100745 assert(!m_Image);
Adam Sawicki1f84f622019-07-02 13:40:01 +0200746 vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100747 m_Buffer = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200748 }
749 if(m_Allocation)
750 {
751 vmaFreeMemory(g_hAllocator, m_Allocation);
Adam Sawickiddcbf8c2019-11-22 15:22:42 +0100752 m_Allocation = VK_NULL_HANDLE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +0200753 }
754}
755
// Pool of persistently-mapped, host-visible staging buffers reused across
// upload/download batches, capped at MAX_TOTAL_SIZE bytes in total.
class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    // Marks all buffers as available again; frees nothing.
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024; // 256 MiB budget.
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr; // Persistent mapping (VMA_ALLOCATION_CREATE_MAPPED_BIT).
        bool Used = false;         // Currently handed out to a caller.
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};
779
780StagingBufferCollection::~StagingBufferCollection()
781{
782 for(size_t i = m_Bufs.size(); i--; )
783 {
784 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
785 }
786}
787
788bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
789{
790 assert(size <= MAX_TOTAL_SIZE);
791
792 // Try to find existing unused buffer with best size.
793 size_t bestIndex = SIZE_MAX;
794 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
795 {
796 BufInfo& currBufInfo = m_Bufs[i];
797 if(!currBufInfo.Used && currBufInfo.Size >= size &&
798 (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
799 {
800 bestIndex = i;
801 }
802 }
803
804 if(bestIndex != SIZE_MAX)
805 {
806 m_Bufs[bestIndex].Used = true;
807 outBuffer = m_Bufs[bestIndex].Buffer;
808 outMappedPtr = m_Bufs[bestIndex].MappedPtr;
809 return true;
810 }
811
812 // Allocate new buffer with requested size.
813 if(m_TotalSize + size <= MAX_TOTAL_SIZE)
814 {
815 BufInfo bufInfo;
816 bufInfo.Size = size;
817 bufInfo.Used = true;
818
819 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
820 bufCreateInfo.size = size;
821 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
822
823 VmaAllocationCreateInfo allocCreateInfo = {};
824 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
825 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
826
827 VmaAllocationInfo allocInfo;
828 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
829 bufInfo.MappedPtr = allocInfo.pMappedData;
830 TEST(res == VK_SUCCESS && bufInfo.MappedPtr);
831
832 outBuffer = bufInfo.Buffer;
833 outMappedPtr = bufInfo.MappedPtr;
834
835 m_Bufs.push_back(std::move(bufInfo));
836
837 m_TotalSize += size;
838
839 return true;
840 }
841
842 // There are some unused but smaller buffers: Free them and try again.
843 bool hasUnused = false;
844 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
845 {
846 if(!m_Bufs[i].Used)
847 {
848 hasUnused = true;
849 break;
850 }
851 }
852 if(hasUnused)
853 {
854 for(size_t i = m_Bufs.size(); i--; )
855 {
856 if(!m_Bufs[i].Used)
857 {
858 m_TotalSize -= m_Bufs[i].Size;
859 vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
860 m_Bufs.erase(m_Bufs.begin() + i);
861 }
862 }
863
864 return AcquireBuffer(size, outBuffer, outMappedPtr);
865 }
866
867 return false;
868}
869
870void StagingBufferCollection::ReleaseAllBuffers()
871{
872 for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
873 {
874 m_Bufs[i].Used = false;
875 }
876}
877
878static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
879{
880 StagingBufferCollection stagingBufs;
881
882 bool cmdBufferStarted = false;
883 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
884 {
885 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
886 if(currAllocInfo.m_Buffer)
887 {
888 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
889
890 VkBuffer stagingBuf = VK_NULL_HANDLE;
891 void* stagingBufMappedPtr = nullptr;
892 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
893 {
894 TEST(cmdBufferStarted);
895 EndSingleTimeCommands();
896 stagingBufs.ReleaseAllBuffers();
897 cmdBufferStarted = false;
898
899 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
900 TEST(ok);
901 }
902
903 // Fill staging buffer.
904 {
905 assert(size % sizeof(uint32_t) == 0);
906 uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
907 uint32_t val = currAllocInfo.m_StartValue;
908 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
909 {
910 *stagingValPtr = val;
911 ++stagingValPtr;
912 ++val;
913 }
914 }
915
916 // Issue copy command from staging buffer to destination buffer.
917 if(!cmdBufferStarted)
918 {
919 cmdBufferStarted = true;
920 BeginSingleTimeCommands();
921 }
922
923 VkBufferCopy copy = {};
924 copy.srcOffset = 0;
925 copy.dstOffset = 0;
926 copy.size = size;
927 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
928 }
929 else
930 {
Adam Sawickia52012d2019-12-23 15:28:51 +0100931 TEST(currAllocInfo.m_ImageInfo.format == VK_FORMAT_R8G8B8A8_UNORM && "Only RGBA8 images are currently supported.");
932 TEST(currAllocInfo.m_ImageInfo.mipLevels == 1 && "Only single mip images are currently supported.");
933
Adam Sawickic467e282019-12-23 16:38:31 +0100934 const VkDeviceSize size = (VkDeviceSize)currAllocInfo.m_ImageInfo.extent.width * currAllocInfo.m_ImageInfo.extent.height * sizeof(uint32_t);
Adam Sawickia52012d2019-12-23 15:28:51 +0100935
936 VkBuffer stagingBuf = VK_NULL_HANDLE;
937 void* stagingBufMappedPtr = nullptr;
938 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
939 {
940 TEST(cmdBufferStarted);
941 EndSingleTimeCommands();
942 stagingBufs.ReleaseAllBuffers();
943 cmdBufferStarted = false;
944
945 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
946 TEST(ok);
947 }
948
949 // Fill staging buffer.
950 {
951 assert(size % sizeof(uint32_t) == 0);
952 uint32_t *stagingValPtr = (uint32_t *)stagingBufMappedPtr;
953 uint32_t val = currAllocInfo.m_StartValue;
954 for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
955 {
956 *stagingValPtr = val;
957 ++stagingValPtr;
958 ++val;
959 }
960 }
961
962 // Issue copy command from staging buffer to destination buffer.
963 if(!cmdBufferStarted)
964 {
965 cmdBufferStarted = true;
966 BeginSingleTimeCommands();
967 }
968
969
970 // Transfer to transfer dst layout
971 VkImageSubresourceRange subresourceRange = {
972 VK_IMAGE_ASPECT_COLOR_BIT,
973 0, VK_REMAINING_MIP_LEVELS,
974 0, VK_REMAINING_ARRAY_LAYERS
975 };
976
977 VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
978 barrier.srcAccessMask = 0;
979 barrier.dstAccessMask = 0;
980 barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
981 barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
982 barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
983 barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
984 barrier.image = currAllocInfo.m_Image;
985 barrier.subresourceRange = subresourceRange;
986
987 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
988 0, nullptr,
989 0, nullptr,
990 1, &barrier);
991
992 // Copy image date
993 VkBufferImageCopy copy = {};
994 copy.bufferOffset = 0;
995 copy.bufferRowLength = 0;
996 copy.bufferImageHeight = 0;
997 copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
998 copy.imageSubresource.layerCount = 1;
999 copy.imageExtent = currAllocInfo.m_ImageInfo.extent;
1000
1001 vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);
1002
1003 // Transfer to desired layout
1004 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1005 barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
1006 barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
1007 barrier.newLayout = currAllocInfo.m_ImageLayout;
1008
1009 vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
1010 0, nullptr,
1011 0, nullptr,
1012 1, &barrier);
Adam Sawickif2975342018-10-16 13:49:02 +02001013 }
1014 }
1015
1016 if(cmdBufferStarted)
1017 {
1018 EndSingleTimeCommands();
1019 stagingBufs.ReleaseAllBuffers();
1020 }
1021}
1022
1023static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
1024{
1025 StagingBufferCollection stagingBufs;
1026
1027 bool cmdBufferStarted = false;
1028 size_t validateAllocIndexOffset = 0;
1029 std::vector<void*> validateStagingBuffers;
1030 for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
1031 {
1032 const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
1033 if(currAllocInfo.m_Buffer)
1034 {
1035 const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;
1036
1037 VkBuffer stagingBuf = VK_NULL_HANDLE;
1038 void* stagingBufMappedPtr = nullptr;
1039 if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
1040 {
1041 TEST(cmdBufferStarted);
1042 EndSingleTimeCommands();
1043 cmdBufferStarted = false;
1044
1045 for(size_t validateIndex = 0;
1046 validateIndex < validateStagingBuffers.size();
1047 ++validateIndex)
1048 {
1049 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1050 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1051 TEST(validateSize % sizeof(uint32_t) == 0);
1052 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1053 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1054 bool valid = true;
1055 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1056 {
1057 if(*stagingValPtr != val)
1058 {
1059 valid = false;
1060 break;
1061 }
1062 ++stagingValPtr;
1063 ++val;
1064 }
1065 TEST(valid);
1066 }
1067
1068 stagingBufs.ReleaseAllBuffers();
1069
1070 validateAllocIndexOffset = allocInfoIndex;
1071 validateStagingBuffers.clear();
1072
1073 bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
1074 TEST(ok);
1075 }
1076
1077 // Issue copy command from staging buffer to destination buffer.
1078 if(!cmdBufferStarted)
1079 {
1080 cmdBufferStarted = true;
1081 BeginSingleTimeCommands();
1082 }
1083
1084 VkBufferCopy copy = {};
1085 copy.srcOffset = 0;
1086 copy.dstOffset = 0;
1087 copy.size = size;
1088 vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);
1089
1090 // Sava mapped pointer for later validation.
1091 validateStagingBuffers.push_back(stagingBufMappedPtr);
1092 }
1093 else
1094 {
1095 TEST(0 && "Images not currently supported.");
1096 }
1097 }
1098
1099 if(cmdBufferStarted)
1100 {
1101 EndSingleTimeCommands();
1102
1103 for(size_t validateIndex = 0;
1104 validateIndex < validateStagingBuffers.size();
1105 ++validateIndex)
1106 {
1107 const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
1108 const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
1109 TEST(validateSize % sizeof(uint32_t) == 0);
1110 const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
1111 uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
1112 bool valid = true;
1113 for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
1114 {
1115 if(*stagingValPtr != val)
1116 {
1117 valid = false;
1118 break;
1119 }
1120 ++stagingValPtr;
1121 ++val;
1122 }
1123 TEST(valid);
1124 }
1125
1126 stagingBufs.ReleaseAllBuffers();
1127 }
1128}
1129
Adam Sawickib8333fb2018-03-13 16:15:53 +01001130static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
1131{
1132 outMemReq = {};
1133 outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
1134 //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
1135}
1136
1137static void CreateBuffer(
1138 VmaPool pool,
1139 const VkBufferCreateInfo& bufCreateInfo,
1140 bool persistentlyMapped,
1141 AllocInfo& outAllocInfo)
1142{
1143 outAllocInfo = {};
1144 outAllocInfo.m_BufferInfo = bufCreateInfo;
1145
1146 VmaAllocationCreateInfo allocCreateInfo = {};
1147 allocCreateInfo.pool = pool;
1148 if(persistentlyMapped)
1149 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
1150
1151 VmaAllocationInfo vmaAllocInfo = {};
1152 ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );
1153
1154 // Setup StartValue and fill.
1155 {
1156 outAllocInfo.m_StartValue = (uint32_t)rand();
1157 uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001158 TEST((data != nullptr) == persistentlyMapped);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001159 if(!persistentlyMapped)
1160 {
1161 ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
1162 }
1163
1164 uint32_t value = outAllocInfo.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001165 TEST(bufCreateInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001166 for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
1167 data[i] = value++;
1168
1169 if(!persistentlyMapped)
1170 vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
1171 }
1172}
1173
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001174static void CreateAllocation(AllocInfo& outAllocation)
Adam Sawickib8333fb2018-03-13 16:15:53 +01001175{
1176 outAllocation.m_Allocation = nullptr;
1177 outAllocation.m_Buffer = nullptr;
1178 outAllocation.m_Image = nullptr;
1179 outAllocation.m_StartValue = (uint32_t)rand();
1180
1181 VmaAllocationCreateInfo vmaMemReq;
1182 GetMemReq(vmaMemReq);
1183
1184 VmaAllocationInfo allocInfo;
1185
1186 const bool isBuffer = true;//(rand() & 0x1) != 0;
1187 const bool isLarge = (rand() % 16) == 0;
1188 if(isBuffer)
1189 {
1190 const uint32_t bufferSize = isLarge ?
1191 (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
1192 (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB
1193
1194 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1195 bufferInfo.size = bufferSize;
1196 bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1197
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001198 VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001199 outAllocation.m_BufferInfo = bufferInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001200 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001201 }
1202 else
1203 {
1204 const uint32_t imageSizeX = isLarge ?
1205 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1206 rand() % 1024 + 1; // 1 ... 1024
1207 const uint32_t imageSizeY = isLarge ?
1208 1024 + rand() % (4096 - 1024) : // 1024 ... 4096
1209 rand() % 1024 + 1; // 1 ... 1024
1210
1211 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
1212 imageInfo.imageType = VK_IMAGE_TYPE_2D;
1213 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
1214 imageInfo.extent.width = imageSizeX;
1215 imageInfo.extent.height = imageSizeY;
1216 imageInfo.extent.depth = 1;
1217 imageInfo.mipLevels = 1;
1218 imageInfo.arrayLayers = 1;
1219 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
1220 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
1221 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
1222 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1223
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001224 VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001225 outAllocation.m_ImageInfo = imageInfo;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001226 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001227 }
1228
1229 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1230 if(allocInfo.pMappedData == nullptr)
1231 {
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001232 VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001233 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001234 }
1235
1236 uint32_t value = outAllocation.m_StartValue;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001237 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001238 for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1239 data[i] = value++;
1240
1241 if(allocInfo.pMappedData == nullptr)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001242 vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001243}
1244
1245static void DestroyAllocation(const AllocInfo& allocation)
1246{
1247 if(allocation.m_Buffer)
1248 vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
1249 else
1250 vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
1251}
1252
1253static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
1254{
1255 for(size_t i = allocations.size(); i--; )
1256 DestroyAllocation(allocations[i]);
1257 allocations.clear();
1258}
1259
1260static void ValidateAllocationData(const AllocInfo& allocation)
1261{
1262 VmaAllocationInfo allocInfo;
1263 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1264
1265 uint32_t* data = (uint32_t*)allocInfo.pMappedData;
1266 if(allocInfo.pMappedData == nullptr)
1267 {
1268 VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001269 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001270 }
1271
1272 uint32_t value = allocation.m_StartValue;
1273 bool ok = true;
1274 size_t i;
Adam Sawickib8d34d52018-10-03 17:41:20 +02001275 TEST(allocInfo.size % 4 == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001276 for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
1277 {
1278 if(data[i] != value++)
1279 {
1280 ok = false;
1281 break;
1282 }
1283 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02001284 TEST(ok);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001285
1286 if(allocInfo.pMappedData == nullptr)
1287 vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
1288}
1289
1290static void RecreateAllocationResource(AllocInfo& allocation)
1291{
1292 VmaAllocationInfo allocInfo;
1293 vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);
1294
1295 if(allocation.m_Buffer)
1296 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001297 vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001298
Adam Sawicki1f84f622019-07-02 13:40:01 +02001299 VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001300 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001301
1302 // Just to silence validation layer warnings.
1303 VkMemoryRequirements vkMemReq;
1304 vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
Adam Sawicki2af57d72018-12-06 15:35:05 +01001305 TEST(vkMemReq.size >= allocation.m_BufferInfo.size);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001306
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001307 res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001308 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001309 }
1310 else
1311 {
Adam Sawicki1f84f622019-07-02 13:40:01 +02001312 vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001313
Adam Sawicki1f84f622019-07-02 13:40:01 +02001314 VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001315 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001316
1317 // Just to silence validation layer warnings.
1318 VkMemoryRequirements vkMemReq;
1319 vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);
1320
Adam Sawickiaf88c1b2019-07-02 12:34:26 +02001321 res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001322 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001323 }
1324}
1325
1326static void Defragment(AllocInfo* allocs, size_t allocCount,
1327 const VmaDefragmentationInfo* defragmentationInfo = nullptr,
1328 VmaDefragmentationStats* defragmentationStats = nullptr)
1329{
1330 std::vector<VmaAllocation> vmaAllocs(allocCount);
1331 for(size_t i = 0; i < allocCount; ++i)
1332 vmaAllocs[i] = allocs[i].m_Allocation;
1333
1334 std::vector<VkBool32> allocChanged(allocCount);
1335
1336 ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
1337 defragmentationInfo, defragmentationStats) );
1338
1339 for(size_t i = 0; i < allocCount; ++i)
1340 {
1341 if(allocChanged[i])
1342 {
1343 RecreateAllocationResource(allocs[i]);
1344 }
1345 }
1346}
1347
1348static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
1349{
1350 std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
1351 ValidateAllocationData(allocInfo);
1352 });
1353}
1354
1355void TestDefragmentationSimple()
1356{
1357 wprintf(L"Test defragmentation simple\n");
1358
1359 RandomNumberGenerator rand(667);
1360
1361 const VkDeviceSize BUF_SIZE = 0x10000;
1362 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1363
1364 const VkDeviceSize MIN_BUF_SIZE = 32;
1365 const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
1366 auto RandomBufSize = [&]() -> VkDeviceSize {
1367 return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
1368 };
1369
1370 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1371 bufCreateInfo.size = BUF_SIZE;
1372 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1373
1374 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1375 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1376
1377 uint32_t memTypeIndex = UINT32_MAX;
1378 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1379
1380 VmaPoolCreateInfo poolCreateInfo = {};
1381 poolCreateInfo.blockSize = BLOCK_SIZE;
1382 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1383
1384 VmaPool pool;
1385 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1386
Adam Sawickie1681912018-11-23 17:50:12 +01001387 // Defragmentation of empty pool.
1388 {
1389 VmaDefragmentationInfo2 defragInfo = {};
1390 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1391 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1392 defragInfo.poolCount = 1;
1393 defragInfo.pPools = &pool;
1394
1395 VmaDefragmentationStats defragStats = {};
1396 VmaDefragmentationContext defragCtx = nullptr;
1397 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
1398 TEST(res >= VK_SUCCESS);
1399 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1400 TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
1401 defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
1402 }
1403
Adam Sawickib8333fb2018-03-13 16:15:53 +01001404 std::vector<AllocInfo> allocations;
1405
1406 // persistentlyMappedOption = 0 - not persistently mapped.
1407 // persistentlyMappedOption = 1 - persistently mapped.
1408 for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
1409 {
1410 wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
1411 const bool persistentlyMapped = persistentlyMappedOption != 0;
1412
1413 // # Test 1
1414 // Buffers of fixed size.
1415 // Fill 2 blocks. Remove odd buffers. Defragment everything.
1416 // Expected result: at least 1 block freed.
1417 {
1418 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1419 {
1420 AllocInfo allocInfo;
1421 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1422 allocations.push_back(allocInfo);
1423 }
1424
1425 for(size_t i = 1; i < allocations.size(); ++i)
1426 {
1427 DestroyAllocation(allocations[i]);
1428 allocations.erase(allocations.begin() + i);
1429 }
1430
1431 VmaDefragmentationStats defragStats;
1432 Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001433 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
1434 TEST(defragStats.deviceMemoryBlocksFreed >= 1);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001435
1436 ValidateAllocationsData(allocations.data(), allocations.size());
1437
1438 DestroyAllAllocations(allocations);
1439 }
1440
1441 // # Test 2
1442 // Buffers of fixed size.
1443 // Fill 2 blocks. Remove odd buffers. Defragment one buffer at time.
1444 // Expected result: Each of 4 interations makes some progress.
1445 {
1446 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1447 {
1448 AllocInfo allocInfo;
1449 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1450 allocations.push_back(allocInfo);
1451 }
1452
1453 for(size_t i = 1; i < allocations.size(); ++i)
1454 {
1455 DestroyAllocation(allocations[i]);
1456 allocations.erase(allocations.begin() + i);
1457 }
1458
1459 VmaDefragmentationInfo defragInfo = {};
1460 defragInfo.maxAllocationsToMove = 1;
1461 defragInfo.maxBytesToMove = BUF_SIZE;
1462
1463 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
1464 {
1465 VmaDefragmentationStats defragStats;
1466 Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001467 TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001468 }
1469
1470 ValidateAllocationsData(allocations.data(), allocations.size());
1471
1472 DestroyAllAllocations(allocations);
1473 }
1474
1475 // # Test 3
1476 // Buffers of variable size.
1477 // Create a number of buffers. Remove some percent of them.
1478 // Defragment while having some percent of them unmovable.
1479 // Expected result: Just simple validation.
1480 {
1481 for(size_t i = 0; i < 100; ++i)
1482 {
1483 VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
1484 localBufCreateInfo.size = RandomBufSize();
1485
1486 AllocInfo allocInfo;
1487 CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
1488 allocations.push_back(allocInfo);
1489 }
1490
1491 const uint32_t percentToDelete = 60;
1492 const size_t numberToDelete = allocations.size() * percentToDelete / 100;
1493 for(size_t i = 0; i < numberToDelete; ++i)
1494 {
1495 size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
1496 DestroyAllocation(allocations[indexToDelete]);
1497 allocations.erase(allocations.begin() + indexToDelete);
1498 }
1499
1500 // Non-movable allocations will be at the beginning of allocations array.
1501 const uint32_t percentNonMovable = 20;
1502 const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
1503 for(size_t i = 0; i < numberNonMovable; ++i)
1504 {
1505 size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
1506 if(indexNonMovable != i)
1507 std::swap(allocations[i], allocations[indexNonMovable]);
1508 }
1509
1510 VmaDefragmentationStats defragStats;
1511 Defragment(
1512 allocations.data() + numberNonMovable,
1513 allocations.size() - numberNonMovable,
1514 nullptr, &defragStats);
1515
1516 ValidateAllocationsData(allocations.data(), allocations.size());
1517
1518 DestroyAllAllocations(allocations);
1519 }
1520 }
1521
Adam Sawicki647cf242018-11-23 17:58:00 +01001522 /*
1523 Allocation that must be move to an overlapping place using memmove().
1524 Create 2 buffers, second slightly bigger than the first. Delete first. Then defragment.
1525 */
Adam Sawickibdb89a92018-12-13 11:56:30 +01001526 if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN disabled.
Adam Sawicki647cf242018-11-23 17:58:00 +01001527 {
1528 AllocInfo allocInfo[2];
1529
1530 bufCreateInfo.size = BUF_SIZE;
1531 CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
1532 const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
1533 bufCreateInfo.size = biggerBufSize;
1534 CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
1535
1536 DestroyAllocation(allocInfo[0]);
1537
1538 VmaDefragmentationStats defragStats;
1539 Defragment(&allocInfo[1], 1, nullptr, &defragStats);
1540 // If this fails, it means we couldn't do memmove with overlapping regions.
1541 TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
1542
1543 ValidateAllocationsData(&allocInfo[1], 1);
1544 DestroyAllocation(allocInfo[1]);
1545 }
1546
Adam Sawickib8333fb2018-03-13 16:15:53 +01001547 vmaDestroyPool(g_hAllocator, pool);
1548}
1549
Adam Sawicki52076eb2018-11-22 16:14:50 +01001550void TestDefragmentationWholePool()
1551{
1552 wprintf(L"Test defragmentation whole pool\n");
1553
1554 RandomNumberGenerator rand(668);
1555
1556 const VkDeviceSize BUF_SIZE = 0x10000;
1557 const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
1558
1559 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
1560 bufCreateInfo.size = BUF_SIZE;
1561 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1562
1563 VmaAllocationCreateInfo exampleAllocCreateInfo = {};
1564 exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
1565
1566 uint32_t memTypeIndex = UINT32_MAX;
1567 vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
1568
1569 VmaPoolCreateInfo poolCreateInfo = {};
1570 poolCreateInfo.blockSize = BLOCK_SIZE;
1571 poolCreateInfo.memoryTypeIndex = memTypeIndex;
1572
1573 VmaDefragmentationStats defragStats[2];
1574 for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
1575 {
1576 VmaPool pool;
1577 ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
1578
1579 std::vector<AllocInfo> allocations;
1580
1581 // Buffers of fixed size.
1582 // Fill 2 blocks. Remove odd buffers. Defragment all of them.
1583 for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
1584 {
1585 AllocInfo allocInfo;
1586 CreateBuffer(pool, bufCreateInfo, false, allocInfo);
1587 allocations.push_back(allocInfo);
1588 }
1589
1590 for(size_t i = 1; i < allocations.size(); ++i)
1591 {
1592 DestroyAllocation(allocations[i]);
1593 allocations.erase(allocations.begin() + i);
1594 }
1595
1596 VmaDefragmentationInfo2 defragInfo = {};
1597 defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
1598 defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
1599 std::vector<VmaAllocation> allocationsToDefrag;
1600 if(caseIndex == 0)
1601 {
1602 defragInfo.poolCount = 1;
1603 defragInfo.pPools = &pool;
1604 }
1605 else
1606 {
1607 const size_t allocCount = allocations.size();
1608 allocationsToDefrag.resize(allocCount);
1609 std::transform(
1610 allocations.begin(), allocations.end(),
1611 allocationsToDefrag.begin(),
1612 [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
1613 defragInfo.allocationCount = (uint32_t)allocCount;
1614 defragInfo.pAllocations = allocationsToDefrag.data();
1615 }
1616
1617 VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
1618 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
1619 TEST(res >= VK_SUCCESS);
1620 vmaDefragmentationEnd(g_hAllocator, defragCtx);
1621
1622 TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
1623
1624 ValidateAllocationsData(allocations.data(), allocations.size());
1625
1626 DestroyAllAllocations(allocations);
1627
1628 vmaDestroyPool(g_hAllocator, pool);
1629 }
1630
1631 TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
1632 TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
1633 TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
1634 TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
1635}
1636
Adam Sawickib8333fb2018-03-13 16:15:53 +01001637void TestDefragmentationFull()
1638{
1639 std::vector<AllocInfo> allocations;
1640
1641 // Create initial allocations.
1642 for(size_t i = 0; i < 400; ++i)
1643 {
1644 AllocInfo allocation;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001645 CreateAllocation(allocation);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001646 allocations.push_back(allocation);
1647 }
1648
1649 // Delete random allocations
1650 const size_t allocationsToDeletePercent = 80;
1651 size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
1652 for(size_t i = 0; i < allocationsToDelete; ++i)
1653 {
1654 size_t index = (size_t)rand() % allocations.size();
1655 DestroyAllocation(allocations[index]);
1656 allocations.erase(allocations.begin() + index);
1657 }
1658
1659 for(size_t i = 0; i < allocations.size(); ++i)
1660 ValidateAllocationData(allocations[i]);
1661
Adam Sawicki0667e332018-08-24 17:26:44 +02001662 //SaveAllocatorStatsToFile(L"Before.csv");
Adam Sawickib8333fb2018-03-13 16:15:53 +01001663
1664 {
1665 std::vector<VmaAllocation> vmaAllocations(allocations.size());
1666 for(size_t i = 0; i < allocations.size(); ++i)
1667 vmaAllocations[i] = allocations[i].m_Allocation;
1668
1669 const size_t nonMovablePercent = 0;
1670 size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
1671 for(size_t i = 0; i < nonMovableCount; ++i)
1672 {
1673 size_t index = (size_t)rand() % vmaAllocations.size();
1674 vmaAllocations.erase(vmaAllocations.begin() + index);
1675 }
1676
1677 const uint32_t defragCount = 1;
1678 for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
1679 {
1680 std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
1681
1682 VmaDefragmentationInfo defragmentationInfo;
1683 defragmentationInfo.maxAllocationsToMove = UINT_MAX;
1684 defragmentationInfo.maxBytesToMove = SIZE_MAX;
1685
1686 wprintf(L"Defragmentation #%u\n", defragIndex);
1687
1688 time_point begTime = std::chrono::high_resolution_clock::now();
1689
1690 VmaDefragmentationStats stats;
1691 VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02001692 TEST(res >= 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001693
1694 float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
1695
1696 wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
1697 wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
1698 wprintf(L"Time: %.2f s\n", defragmentDuration);
1699
1700 for(size_t i = 0; i < vmaAllocations.size(); ++i)
1701 {
1702 if(allocationsChanged[i])
1703 {
1704 RecreateAllocationResource(allocations[i]);
1705 }
1706 }
1707
1708 for(size_t i = 0; i < allocations.size(); ++i)
1709 ValidateAllocationData(allocations[i]);
1710
Adam Sawicki0667e332018-08-24 17:26:44 +02001711 //wchar_t fileName[MAX_PATH];
1712 //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
1713 //SaveAllocatorStatsToFile(fileName);
Adam Sawickib8333fb2018-03-13 16:15:53 +01001714 }
1715 }
1716
1717 // Destroy all remaining allocations.
1718 DestroyAllAllocations(allocations);
1719}
1720
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001721static void TestDefragmentationGpu()
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001722{
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001723 wprintf(L"Test defragmentation GPU\n");
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001724
1725 std::vector<AllocInfo> allocations;
1726
1727 // Create that many allocations to surely fill 3 new blocks of 256 MB.
Adam Sawickic6ede152018-11-16 17:04:14 +01001728 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
1729 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001730 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic6ede152018-11-16 17:04:14 +01001731 const size_t bufCount = (size_t)(totalSize / bufSizeMin);
1732 const size_t percentToLeave = 30;
1733 const size_t percentNonMovable = 3;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001734 RandomNumberGenerator rand = { 234522 };
1735
1736 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001737
1738 VmaAllocationCreateInfo allocCreateInfo = {};
1739 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
Adam Sawickic6ede152018-11-16 17:04:14 +01001740 allocCreateInfo.flags = 0;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001741
1742 // Create all intended buffers.
1743 for(size_t i = 0; i < bufCount; ++i)
1744 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001745 bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
1746
1747 if(rand.Generate() % 100 < percentNonMovable)
1748 {
1749 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
1750 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1751 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1752 allocCreateInfo.pUserData = (void*)(uintptr_t)2;
1753 }
1754 else
1755 {
1756 // Different usage just to see different color in output from VmaDumpVis.
1757 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
1758 VK_BUFFER_USAGE_TRANSFER_DST_BIT |
1759 VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1760 // And in JSON dump.
1761 allocCreateInfo.pUserData = (void*)(uintptr_t)1;
1762 }
1763
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001764 AllocInfo alloc;
1765 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
1766 alloc.m_StartValue = rand.Generate();
1767 allocations.push_back(alloc);
1768 }
1769
1770 // Destroy some percentage of them.
1771 {
1772 const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
1773 for(size_t i = 0; i < buffersToDestroy; ++i)
1774 {
1775 const size_t index = rand.Generate() % allocations.size();
1776 allocations[index].Destroy();
1777 allocations.erase(allocations.begin() + index);
1778 }
1779 }
1780
1781 // Fill them with meaningful data.
1782 UploadGpuData(allocations.data(), allocations.size());
1783
Adam Sawickic6ede152018-11-16 17:04:14 +01001784 wchar_t fileName[MAX_PATH];
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001785 swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001786 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001787
1788 // Defragment using GPU only.
1789 {
1790 const size_t allocCount = allocations.size();
Adam Sawicki440307e2018-10-18 15:05:19 +02001791
Adam Sawickic6ede152018-11-16 17:04:14 +01001792 std::vector<VmaAllocation> allocationPtrs;
1793 std::vector<VkBool32> allocationChanged;
1794 std::vector<size_t> allocationOriginalIndex;
1795
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001796 for(size_t i = 0; i < allocCount; ++i)
1797 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001798 VmaAllocationInfo allocInfo = {};
1799 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
1800 if((uintptr_t)allocInfo.pUserData == 1) // Movable
1801 {
1802 allocationPtrs.push_back(allocations[i].m_Allocation);
1803 allocationChanged.push_back(VK_FALSE);
1804 allocationOriginalIndex.push_back(i);
1805 }
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001806 }
Adam Sawickic6ede152018-11-16 17:04:14 +01001807
1808 const size_t movableAllocCount = allocationPtrs.size();
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001809
1810 BeginSingleTimeCommands();
1811
1812 VmaDefragmentationInfo2 defragInfo = {};
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001813 defragInfo.flags = 0;
Adam Sawickic6ede152018-11-16 17:04:14 +01001814 defragInfo.allocationCount = (uint32_t)movableAllocCount;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001815 defragInfo.pAllocations = allocationPtrs.data();
Adam Sawicki440307e2018-10-18 15:05:19 +02001816 defragInfo.pAllocationsChanged = allocationChanged.data();
1817 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001818 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
1819 defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
1820
1821 VmaDefragmentationStats stats = {};
1822 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
1823 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
1824 TEST(res >= VK_SUCCESS);
1825
1826 EndSingleTimeCommands();
1827
1828 vmaDefragmentationEnd(g_hAllocator, ctx);
1829
Adam Sawickic6ede152018-11-16 17:04:14 +01001830 for(size_t i = 0; i < movableAllocCount; ++i)
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001831 {
1832 if(allocationChanged[i])
1833 {
Adam Sawickic6ede152018-11-16 17:04:14 +01001834 const size_t origAllocIndex = allocationOriginalIndex[i];
1835 RecreateAllocationResource(allocations[origAllocIndex]);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001836 }
1837 }
1838
Adam Sawicki4d844e22019-01-24 16:21:05 +01001839 // If corruption detection is enabled, GPU defragmentation may not work on
1840 // memory types that have this detection active, e.g. on Intel.
Adam Sawickia1f727c2019-01-24 16:25:11 +01001841 #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
Adam Sawicki4d844e22019-01-24 16:21:05 +01001842 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
1843 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
Adam Sawickia1f727c2019-01-24 16:25:11 +01001844 #endif
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001845 }
1846
1847 ValidateGpuData(allocations.data(), allocations.size());
1848
Adam Sawicki9a4f5082018-11-23 17:26:05 +01001849 swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
Adam Sawickic6ede152018-11-16 17:04:14 +01001850 SaveAllocatorStatsToFile(fileName);
Adam Sawickiff0f7b82018-10-18 14:44:05 +02001851
1852 // Destroy all remaining buffers.
1853 for(size_t i = allocations.size(); i--; )
1854 {
1855 allocations[i].Destroy();
1856 }
1857}
1858
// Records into g_hTemporaryCommandBuffer the GPU work for one incremental
// defragmentation pass: for every move reported in stepInfo it creates a new
// buffer/image bound at the destination memory, emits the required pipeline
// barriers / image layout transitions, and copies the old resource to the new
// one. New handles are stored in AllocInfo::m_NewBuffer / m_NewImage; the
// caller is expected to destroy the old handles after the pass completes.
static void ProcessDefragmentationStepInfo(VmaDefragmentationPassInfo &stepInfo)
{
    std::vector<VkImageMemoryBarrier> beginImageBarriers;
    std::vector<VkImageMemoryBarrier> finalizeImageBarriers;

    VkPipelineStageFlags beginSrcStageMask = 0;
    VkPipelineStageFlags beginDstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;

    VkPipelineStageFlags finalizeSrcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
    VkPipelineStageFlags finalizeDstStageMask = 0;

    bool wantsMemoryBarrier = false;

    VkMemoryBarrier beginMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
    VkMemoryBarrier finalizeMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };

    // First pass over the moves: create destination resources and gather barriers.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        // The test set pUserData to point back to the owning AllocInfo.
        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            VkImage newImage;

            const VkResult result = vkCreateImage(g_hDevice, &allocInfo->m_ImageInfo, g_Allocs, &newImage);
            TEST(result >= VK_SUCCESS);

            vkBindImageMemory(g_hDevice, newImage, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewImage = newImage;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // We need one pipeline barrier and two image layout transitions here
            // First we'll have to turn our newly created image into VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
            // And the second one is turning the old image into VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL

            VkImageSubresourceRange subresourceRange = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, VK_REMAINING_MIP_LEVELS,
                0, VK_REMAINING_ARRAY_LAYERS
            };

            VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
            barrier.srcAccessMask = 0;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.image = newImage;
            barrier.subresourceRange = subresourceRange;

            beginImageBarriers.push_back(barrier);

            // Second barrier to convert the existing image. This one actually needs a real barrier
            barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
            barrier.oldLayout = allocInfo->m_ImageLayout;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
            barrier.image = allocInfo->m_Image;

            beginImageBarriers.push_back(barrier);

            // And lastly we need a barrier that turns our new image into the layout of the old one
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.newLayout = allocInfo->m_ImageLayout;
            barrier.image = newImage;

            finalizeImageBarriers.push_back(barrier);
        }
        else if(allocInfo->m_Buffer)
        {
            VkBuffer newBuffer;

            const VkResult result = vkCreateBuffer(g_hDevice, &allocInfo->m_BufferInfo, g_Allocs, &newBuffer);
            TEST(result >= VK_SUCCESS);

            vkBindBufferMemory(g_hDevice, newBuffer, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
            allocInfo->m_NewBuffer = newBuffer;

            // Keep track of our pipeline stages that we need to wait/signal on
            beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
            finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

            // Buffers have no layouts, so a single global memory barrier per
            // vkCmdPipelineBarrier covers all of them.
            beginMemoryBarrier.srcAccessMask |= VK_ACCESS_MEMORY_WRITE_BIT;
            beginMemoryBarrier.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;

            finalizeMemoryBarrier.srcAccessMask |= VK_ACCESS_TRANSFER_WRITE_BIT;
            finalizeMemoryBarrier.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT;

            wantsMemoryBarrier = true;
        }
    }

    // Emit the "begin" barrier batch that prepares sources for reading and
    // new images for writing, before any copies are recorded.
    if(!beginImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, beginSrcStageMask, beginDstStageMask, 0,
            memoryBarrierCount, &beginMemoryBarrier,
            0, nullptr,
            (uint32_t)beginImageBarriers.size(), beginImageBarriers.data());
    }

    // Second pass over the moves: record the actual copy commands.
    for(uint32_t i = 0; i < stepInfo.moveCount; ++ i)
    {
        VmaAllocationInfo info;
        vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);

        AllocInfo *allocInfo = (AllocInfo *)info.pUserData;

        if(allocInfo->m_Image)
        {
            std::vector<VkImageCopy> imageCopies;

            // Copy all mips of the source image into the target image
            VkOffset3D offset = { 0, 0, 0 };
            VkExtent3D extent = allocInfo->m_ImageInfo.extent;

            VkImageSubresourceLayers subresourceLayers = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0,
                0, 1
            };

            for(uint32_t mip = 0; mip < allocInfo->m_ImageInfo.mipLevels; ++ mip)
            {
                subresourceLayers.mipLevel = mip;

                VkImageCopy imageCopy{
                    subresourceLayers,
                    offset,
                    subresourceLayers,
                    offset,
                    extent
                };

                imageCopies.push_back(imageCopy);

                // Each successive mip level is half the size, clamped to 1.
                extent.width = std::max(uint32_t(1), extent.width >> 1);
                extent.height = std::max(uint32_t(1), extent.height >> 1);
                extent.depth = std::max(uint32_t(1), extent.depth >> 1);
            }

            vkCmdCopyImage(
                g_hTemporaryCommandBuffer,
                allocInfo->m_Image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                allocInfo->m_NewImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                (uint32_t)imageCopies.size(), imageCopies.data());
        }
        else if(allocInfo->m_Buffer)
        {
            VkBufferCopy region = {
                0,
                0,
                allocInfo->m_BufferInfo.size };

            vkCmdCopyBuffer(g_hTemporaryCommandBuffer,
                allocInfo->m_Buffer, allocInfo->m_NewBuffer,
                1, &region);
        }
    }

    // Emit the "finalize" barrier batch that returns the new resources to the
    // layouts/access the rest of the application expects.
    if(!finalizeImageBarriers.empty() || wantsMemoryBarrier)
    {
        const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;

        vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, finalizeSrcStageMask, finalizeDstStageMask, 0,
            memoryBarrierCount, &finalizeMemoryBarrier,
            0, nullptr,
            (uint32_t)finalizeImageBarriers.size(), finalizeImageBarriers.data());
    }
}
2039
2040
2041static void TestDefragmentationIncrementalBasic()
2042{
2043 wprintf(L"Test defragmentation incremental basic\n");
Adam Sawickia52012d2019-12-23 15:28:51 +01002044
2045 std::vector<AllocInfo> allocations;
2046
2047 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2048 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2049 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2050 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2051 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
Adam Sawickic467e282019-12-23 16:38:31 +01002052 const size_t imageCount = totalSize / ((size_t)imageSizes[0] * imageSizes[0] * 4) / 2;
Adam Sawickia52012d2019-12-23 15:28:51 +01002053 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2054 const size_t percentToLeave = 30;
2055 RandomNumberGenerator rand = { 234522 };
2056
2057 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2058 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2059 imageInfo.extent.depth = 1;
2060 imageInfo.mipLevels = 1;
2061 imageInfo.arrayLayers = 1;
2062 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2063 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2064 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2065 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2066 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2067
2068 VmaAllocationCreateInfo allocCreateInfo = {};
2069 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2070 allocCreateInfo.flags = 0;
2071
2072 // Create all intended images.
2073 for(size_t i = 0; i < imageCount; ++i)
2074 {
2075 const uint32_t size = imageSizes[rand.Generate() % 3];
2076
2077 imageInfo.extent.width = size;
2078 imageInfo.extent.height = size;
2079
2080 AllocInfo alloc;
2081 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2082 alloc.m_StartValue = 0;
2083
2084 allocations.push_back(alloc);
2085 }
2086
2087 // And all buffers
2088 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2089
2090 for(size_t i = 0; i < bufCount; ++i)
2091 {
2092 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2093 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2094
2095 AllocInfo alloc;
2096 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2097 alloc.m_StartValue = 0;
2098
2099 allocations.push_back(alloc);
2100 }
2101
2102 // Destroy some percentage of them.
2103 {
2104 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2105 for(size_t i = 0; i < allocationsToDestroy; ++i)
2106 {
2107 const size_t index = rand.Generate() % allocations.size();
2108 allocations[index].Destroy();
2109 allocations.erase(allocations.begin() + index);
2110 }
2111 }
2112
2113 {
2114 // Set our user data pointers. A real application should probably be more clever here
2115 const size_t allocationCount = allocations.size();
2116 for(size_t i = 0; i < allocationCount; ++i)
2117 {
2118 AllocInfo &alloc = allocations[i];
2119 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2120 }
2121 }
2122
2123 // Fill them with meaningful data.
2124 UploadGpuData(allocations.data(), allocations.size());
2125
2126 wchar_t fileName[MAX_PATH];
2127 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_A_before.json");
2128 SaveAllocatorStatsToFile(fileName);
2129
2130 // Defragment using GPU only.
2131 {
2132 const size_t allocCount = allocations.size();
2133
2134 std::vector<VmaAllocation> allocationPtrs;
2135
2136 for(size_t i = 0; i < allocCount; ++i)
2137 {
Adam Sawickia52012d2019-12-23 15:28:51 +01002138 allocationPtrs.push_back(allocations[i].m_Allocation);
2139 }
2140
2141 const size_t movableAllocCount = allocationPtrs.size();
2142
2143 VmaDefragmentationInfo2 defragInfo = {};
2144 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2145 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2146 defragInfo.pAllocations = allocationPtrs.data();
2147 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2148 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2149
2150 VmaDefragmentationStats stats = {};
2151 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2152 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2153 TEST(res >= VK_SUCCESS);
2154
2155 res = VK_NOT_READY;
2156
Adam Sawickic467e282019-12-23 16:38:31 +01002157 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002158 moveInfo.resize(movableAllocCount);
2159
2160 while(res == VK_NOT_READY)
2161 {
Adam Sawickic467e282019-12-23 16:38:31 +01002162 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002163 stepInfo.pMoves = moveInfo.data();
2164 stepInfo.moveCount = (uint32_t)moveInfo.size();
2165
Adam Sawickic467e282019-12-23 16:38:31 +01002166 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002167 TEST(res >= VK_SUCCESS);
2168
2169 BeginSingleTimeCommands();
Adam Sawickic467e282019-12-23 16:38:31 +01002170 std::vector<void*> newHandles;
Adam Sawickia52012d2019-12-23 15:28:51 +01002171 ProcessDefragmentationStepInfo(stepInfo);
2172 EndSingleTimeCommands();
2173
Adam Sawickic467e282019-12-23 16:38:31 +01002174 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2175
2176 // Destroy old buffers/images and replace them with new handles.
2177 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2178 {
2179 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2180 VmaAllocationInfo vmaAllocInfo;
2181 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2182 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2183 if(allocInfo->m_Buffer)
2184 {
2185 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2186 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2187 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2188 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2189 }
2190 else if(allocInfo->m_Image)
2191 {
2192 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2193 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2194 allocInfo->m_Image = allocInfo->m_NewImage;
2195 allocInfo->m_NewImage = VK_NULL_HANDLE;
2196 }
2197 else
2198 assert(0);
2199 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002200 }
2201
2202 TEST(res >= VK_SUCCESS);
2203 vmaDefragmentationEnd(g_hAllocator, ctx);
2204
2205 // If corruption detection is enabled, GPU defragmentation may not work on
2206 // memory types that have this detection active, e.g. on Intel.
2207#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2208 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2209 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2210#endif
2211 }
2212
2213 //ValidateGpuData(allocations.data(), allocations.size());
2214
2215 swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_B_after.json");
2216 SaveAllocatorStatsToFile(fileName);
2217
Adam Sawickic467e282019-12-23 16:38:31 +01002218 // Destroy all remaining buffers and images.
Adam Sawickia52012d2019-12-23 15:28:51 +01002219 for(size_t i = allocations.size(); i--; )
2220 {
2221 allocations[i].Destroy();
2222 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002223}
2224
2225void TestDefragmentationIncrementalComplex()
2226{
2227 wprintf(L"Test defragmentation incremental complex\n");
Adam Sawickidb4c1632020-07-16 16:41:53 +02002228
Adam Sawickia52012d2019-12-23 15:28:51 +01002229 std::vector<AllocInfo> allocations;
2230
2231 // Create that many allocations to surely fill 3 new blocks of 256 MB.
2232 const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
2233 const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
2234 const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
2235 const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
2236 const size_t imageCount = (size_t)(totalSize / (imageSizes[0] * imageSizes[0] * 4)) / 2;
2237 const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
2238 const size_t percentToLeave = 30;
2239 RandomNumberGenerator rand = { 234522 };
2240
2241 VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
2242 imageInfo.imageType = VK_IMAGE_TYPE_2D;
2243 imageInfo.extent.depth = 1;
2244 imageInfo.mipLevels = 1;
2245 imageInfo.arrayLayers = 1;
2246 imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
2247 imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
2248 imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2249 imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2250 imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2251
2252 VmaAllocationCreateInfo allocCreateInfo = {};
2253 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2254 allocCreateInfo.flags = 0;
2255
2256 // Create all intended images.
2257 for(size_t i = 0; i < imageCount; ++i)
2258 {
2259 const uint32_t size = imageSizes[rand.Generate() % 3];
2260
2261 imageInfo.extent.width = size;
2262 imageInfo.extent.height = size;
2263
2264 AllocInfo alloc;
2265 alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
2266 alloc.m_StartValue = 0;
2267
2268 allocations.push_back(alloc);
2269 }
2270
2271 // And all buffers
2272 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2273
2274 for(size_t i = 0; i < bufCount; ++i)
2275 {
2276 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
2277 bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2278
2279 AllocInfo alloc;
2280 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
2281 alloc.m_StartValue = 0;
2282
2283 allocations.push_back(alloc);
2284 }
2285
2286 // Destroy some percentage of them.
2287 {
2288 const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
2289 for(size_t i = 0; i < allocationsToDestroy; ++i)
2290 {
2291 const size_t index = rand.Generate() % allocations.size();
2292 allocations[index].Destroy();
2293 allocations.erase(allocations.begin() + index);
2294 }
2295 }
2296
2297 {
2298 // Set our user data pointers. A real application should probably be more clever here
2299 const size_t allocationCount = allocations.size();
2300 for(size_t i = 0; i < allocationCount; ++i)
2301 {
2302 AllocInfo &alloc = allocations[i];
2303 vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
2304 }
2305 }
2306
2307 // Fill them with meaningful data.
2308 UploadGpuData(allocations.data(), allocations.size());
2309
2310 wchar_t fileName[MAX_PATH];
2311 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_A_before.json");
2312 SaveAllocatorStatsToFile(fileName);
2313
2314 std::vector<AllocInfo> additionalAllocations;
2315
2316#define MakeAdditionalAllocation() \
2317 do { \
2318 { \
2319 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16); \
2320 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; \
2321 \
2322 AllocInfo alloc; \
2323 alloc.CreateBuffer(bufCreateInfo, allocCreateInfo); \
2324 \
2325 additionalAllocations.push_back(alloc); \
2326 } \
2327 } while(0)
2328
2329 // Defragment using GPU only.
2330 {
2331 const size_t allocCount = allocations.size();
2332
2333 std::vector<VmaAllocation> allocationPtrs;
2334
2335 for(size_t i = 0; i < allocCount; ++i)
2336 {
2337 VmaAllocationInfo allocInfo = {};
2338 vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
2339
2340 allocationPtrs.push_back(allocations[i].m_Allocation);
2341 }
2342
2343 const size_t movableAllocCount = allocationPtrs.size();
2344
2345 VmaDefragmentationInfo2 defragInfo = {};
2346 defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
2347 defragInfo.allocationCount = (uint32_t)movableAllocCount;
2348 defragInfo.pAllocations = allocationPtrs.data();
2349 defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
2350 defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
2351
2352 VmaDefragmentationStats stats = {};
2353 VmaDefragmentationContext ctx = VK_NULL_HANDLE;
2354 VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
2355 TEST(res >= VK_SUCCESS);
2356
2357 res = VK_NOT_READY;
2358
Adam Sawickic467e282019-12-23 16:38:31 +01002359 std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
Adam Sawickia52012d2019-12-23 15:28:51 +01002360 moveInfo.resize(movableAllocCount);
2361
2362 MakeAdditionalAllocation();
2363
2364 while(res == VK_NOT_READY)
2365 {
Adam Sawickic467e282019-12-23 16:38:31 +01002366 VmaDefragmentationPassInfo stepInfo = {};
Adam Sawickia52012d2019-12-23 15:28:51 +01002367 stepInfo.pMoves = moveInfo.data();
2368 stepInfo.moveCount = (uint32_t)moveInfo.size();
2369
Adam Sawickic467e282019-12-23 16:38:31 +01002370 res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
Adam Sawickia52012d2019-12-23 15:28:51 +01002371 TEST(res >= VK_SUCCESS);
2372
2373 MakeAdditionalAllocation();
2374
2375 BeginSingleTimeCommands();
2376 ProcessDefragmentationStepInfo(stepInfo);
2377 EndSingleTimeCommands();
2378
Adam Sawickic467e282019-12-23 16:38:31 +01002379 res = vmaEndDefragmentationPass(g_hAllocator, ctx);
2380
2381 // Destroy old buffers/images and replace them with new handles.
2382 for(size_t i = 0; i < stepInfo.moveCount; ++i)
2383 {
2384 VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
2385 VmaAllocationInfo vmaAllocInfo;
2386 vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
2387 AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
2388 if(allocInfo->m_Buffer)
2389 {
2390 assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
2391 vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
2392 allocInfo->m_Buffer = allocInfo->m_NewBuffer;
2393 allocInfo->m_NewBuffer = VK_NULL_HANDLE;
2394 }
2395 else if(allocInfo->m_Image)
2396 {
2397 assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
2398 vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
2399 allocInfo->m_Image = allocInfo->m_NewImage;
2400 allocInfo->m_NewImage = VK_NULL_HANDLE;
2401 }
2402 else
2403 assert(0);
2404 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002405
2406 MakeAdditionalAllocation();
2407 }
2408
2409 TEST(res >= VK_SUCCESS);
2410 vmaDefragmentationEnd(g_hAllocator, ctx);
2411
2412 // If corruption detection is enabled, GPU defragmentation may not work on
2413 // memory types that have this detection active, e.g. on Intel.
2414#if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
2415 TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
2416 TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
2417#endif
2418 }
2419
2420 //ValidateGpuData(allocations.data(), allocations.size());
2421
2422 swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_B_after.json");
2423 SaveAllocatorStatsToFile(fileName);
2424
2425 // Destroy all remaining buffers.
2426 for(size_t i = allocations.size(); i--; )
2427 {
2428 allocations[i].Destroy();
2429 }
2430
2431 for(size_t i = additionalAllocations.size(); i--; )
2432 {
2433 additionalAllocations[i].Destroy();
2434 }
Adam Sawickia52012d2019-12-23 15:28:51 +01002435}
2436
2437
Adam Sawickib8333fb2018-03-13 16:15:53 +01002438static void TestUserData()
2439{
2440 VkResult res;
2441
2442 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2443 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2444 bufCreateInfo.size = 0x10000;
2445
2446 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
2447 {
2448 // Opaque pointer
2449 {
2450
2451 void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
2452 void* pointerToSomething = &res;
2453
2454 VmaAllocationCreateInfo allocCreateInfo = {};
2455 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2456 allocCreateInfo.pUserData = numberAsPointer;
2457 if(testIndex == 1)
2458 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2459
2460 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2461 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002462 TEST(res == VK_SUCCESS);
2463 TEST(allocInfo.pUserData = numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002464
2465 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002466 TEST(allocInfo.pUserData == numberAsPointer);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002467
2468 vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
2469 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002470 TEST(allocInfo.pUserData == pointerToSomething);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002471
2472 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2473 }
2474
2475 // String
2476 {
2477 const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
2478 const char* name2 = "2";
2479 const size_t name1Len = strlen(name1);
2480
2481 char* name1Buf = new char[name1Len + 1];
2482 strcpy_s(name1Buf, name1Len + 1, name1);
2483
2484 VmaAllocationCreateInfo allocCreateInfo = {};
2485 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2486 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
2487 allocCreateInfo.pUserData = name1Buf;
2488 if(testIndex == 1)
2489 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2490
2491 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2492 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002493 TEST(res == VK_SUCCESS);
2494 TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
2495 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002496
2497 delete[] name1Buf;
2498
2499 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002500 TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002501
2502 vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
2503 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002504 TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002505
2506 vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
2507 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002508 TEST(allocInfo.pUserData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002509
2510 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2511 }
2512 }
2513}
2514
Adam Sawicki370ab182018-11-08 16:31:00 +01002515static void TestInvalidAllocations()
2516{
2517 VkResult res;
2518
2519 VmaAllocationCreateInfo allocCreateInfo = {};
2520 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2521
2522 // Try to allocate 0 bytes.
2523 {
2524 VkMemoryRequirements memReq = {};
2525 memReq.size = 0; // !!!
2526 memReq.alignment = 4;
2527 memReq.memoryTypeBits = UINT32_MAX;
2528 VmaAllocation alloc = VK_NULL_HANDLE;
2529 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
2530 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
2531 }
2532
2533 // Try to create buffer with size = 0.
2534 {
2535 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2536 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2537 bufCreateInfo.size = 0; // !!!
2538 VkBuffer buf = VK_NULL_HANDLE;
2539 VmaAllocation alloc = VK_NULL_HANDLE;
2540 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
2541 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2542 }
2543
2544 // Try to create image with one dimension = 0.
2545 {
2546 VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2547 imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
2548 imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
2549 imageCreateInfo.extent.width = 128;
2550 imageCreateInfo.extent.height = 0; // !!!
2551 imageCreateInfo.extent.depth = 1;
2552 imageCreateInfo.mipLevels = 1;
2553 imageCreateInfo.arrayLayers = 1;
2554 imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
2555 imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
2556 imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2557 imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
2558 VkImage image = VK_NULL_HANDLE;
2559 VmaAllocation alloc = VK_NULL_HANDLE;
2560 res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
2561 TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
2562 }
2563}
2564
Adam Sawickib8333fb2018-03-13 16:15:53 +01002565static void TestMemoryRequirements()
2566{
2567 VkResult res;
2568 VkBuffer buf;
2569 VmaAllocation alloc;
2570 VmaAllocationInfo allocInfo;
2571
2572 const VkPhysicalDeviceMemoryProperties* memProps;
2573 vmaGetMemoryProperties(g_hAllocator, &memProps);
2574
2575 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2576 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2577 bufInfo.size = 128;
2578
2579 VmaAllocationCreateInfo allocCreateInfo = {};
2580
2581 // No requirements.
2582 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002583 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002584 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2585
2586 // Usage.
2587 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2588 allocCreateInfo.requiredFlags = 0;
2589 allocCreateInfo.preferredFlags = 0;
2590 allocCreateInfo.memoryTypeBits = UINT32_MAX;
2591
2592 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002593 TEST(res == VK_SUCCESS);
2594 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002595 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2596
2597 // Required flags, preferred flags.
2598 allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
2599 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
2600 allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
2601 allocCreateInfo.memoryTypeBits = 0;
2602
2603 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002604 TEST(res == VK_SUCCESS);
2605 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2606 TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002607 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2608
2609 // memoryTypeBits.
2610 const uint32_t memType = allocInfo.memoryType;
2611 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2612 allocCreateInfo.requiredFlags = 0;
2613 allocCreateInfo.preferredFlags = 0;
2614 allocCreateInfo.memoryTypeBits = 1u << memType;
2615
2616 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002617 TEST(res == VK_SUCCESS);
2618 TEST(allocInfo.memoryType == memType);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002619 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2620
2621}
2622
Adam Sawickia1d992f2020-03-02 15:32:10 +01002623static void TestGetAllocatorInfo()
2624{
2625 wprintf(L"Test vnaGetAllocatorInfo\n");
2626
2627 VmaAllocatorInfo allocInfo = {};
2628 vmaGetAllocatorInfo(g_hAllocator, &allocInfo);
2629 TEST(allocInfo.instance == g_hVulkanInstance);
2630 TEST(allocInfo.physicalDevice == g_hPhysicalDevice);
2631 TEST(allocInfo.device == g_hDevice);
2632}
2633
Adam Sawickib8333fb2018-03-13 16:15:53 +01002634static void TestBasics()
2635{
Adam Sawickiaaa1a562020-06-24 17:41:09 +02002636 wprintf(L"Test basics\n");
2637
Adam Sawickib8333fb2018-03-13 16:15:53 +01002638 VkResult res;
2639
Adam Sawickia1d992f2020-03-02 15:32:10 +01002640 TestGetAllocatorInfo();
2641
Adam Sawickib8333fb2018-03-13 16:15:53 +01002642 TestMemoryRequirements();
2643
2644 // Lost allocation
2645 {
2646 VmaAllocation alloc = VK_NULL_HANDLE;
2647 vmaCreateLostAllocation(g_hAllocator, &alloc);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002648 TEST(alloc != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002649
2650 VmaAllocationInfo allocInfo;
2651 vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002652 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
2653 TEST(allocInfo.size == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002654
2655 vmaFreeMemory(g_hAllocator, alloc);
2656 }
2657
2658 // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
2659 {
2660 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2661 bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
2662 bufCreateInfo.size = 128;
2663
2664 VmaAllocationCreateInfo allocCreateInfo = {};
2665 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2666 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
2667
2668 VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
2669 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002670 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002671
2672 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2673
2674 // Same with OWN_MEMORY.
2675 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2676
2677 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002678 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002679
2680 vmaDestroyBuffer(g_hAllocator, buf, alloc);
2681 }
2682
2683 TestUserData();
Adam Sawicki370ab182018-11-08 16:31:00 +01002684
2685 TestInvalidAllocations();
Adam Sawickib8333fb2018-03-13 16:15:53 +01002686}
2687
Adam Sawickiaaa1a562020-06-24 17:41:09 +02002688static void TestAllocationVersusResourceSize()
2689{
2690 wprintf(L"Test allocation versus resource size\n");
2691
2692 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2693 bufCreateInfo.size = 22921; // Prime number
2694 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2695
2696 VmaAllocationCreateInfo allocCreateInfo = {};
2697 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
2698
2699 for(uint32_t i = 0; i < 2; ++i)
2700 {
2701 allocCreateInfo.flags = (i == 1) ? VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT : 0;
2702
2703 AllocInfo info;
2704 info.CreateBuffer(bufCreateInfo, allocCreateInfo);
2705
2706 VmaAllocationInfo allocInfo = {};
2707 vmaGetAllocationInfo(g_hAllocator, info.m_Allocation, &allocInfo);
2708 //wprintf(L" Buffer size = %llu, allocation size = %llu\n", bufCreateInfo.size, allocInfo.size);
2709
2710 // Map and test accessing entire area of the allocation, not only the buffer.
2711 void* mappedPtr = nullptr;
2712 VkResult res = vmaMapMemory(g_hAllocator, info.m_Allocation, &mappedPtr);
2713 TEST(res == VK_SUCCESS);
2714
2715 memset(mappedPtr, 0xCC, (size_t)allocInfo.size);
2716
2717 vmaUnmapMemory(g_hAllocator, info.m_Allocation);
2718
2719 info.Destroy();
2720 }
2721}
2722
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01002723static void TestPool_MinBlockCount()
2724{
2725#if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
2726 return;
2727#endif
2728
2729 wprintf(L"Test Pool MinBlockCount\n");
2730 VkResult res;
2731
2732 static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
2733 static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
2734
2735 VmaAllocationCreateInfo allocCreateInfo = {};
2736 allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
2737
2738 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2739 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
2740 bufCreateInfo.size = ALLOC_SIZE;
2741
2742 VmaPoolCreateInfo poolCreateInfo = {};
2743 poolCreateInfo.blockSize = BLOCK_SIZE;
2744 poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
2745 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
2746 TEST(res == VK_SUCCESS);
2747
2748 VmaPool pool = VK_NULL_HANDLE;
2749 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
2750 TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
2751
2752 // Check that there are 2 blocks preallocated as requested.
2753 VmaPoolStats begPoolStats = {};
2754 vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
2755 TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
2756
2757 // Allocate 5 buffers to create 3 blocks.
2758 static const uint32_t BUF_COUNT = 5;
2759 allocCreateInfo.pool = pool;
2760 std::vector<AllocInfo> allocs(BUF_COUNT);
2761 for(uint32_t i = 0; i < BUF_COUNT; ++i)
2762 {
2763 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
2764 TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
2765 }
2766
2767 // Check that there are really 3 blocks.
2768 VmaPoolStats poolStats2 = {};
2769 vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
2770 TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
2771
2772 // Free two first allocations to make one block empty.
2773 allocs[0].Destroy();
2774 allocs[1].Destroy();
2775
2776 // Check that there are still 3 blocks due to hysteresis.
2777 VmaPoolStats poolStats3 = {};
2778 vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
2779 TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats2.size == BLOCK_SIZE * 3);
2780
2781 // Free the last allocation to make second block empty.
2782 allocs[BUF_COUNT - 1].Destroy();
2783
2784 // Check that there are now 2 blocks only.
2785 VmaPoolStats poolStats4 = {};
2786 vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
2787 TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
2788
2789 // Cleanup.
2790 for(size_t i = allocs.size(); i--; )
2791 {
2792 allocs[i].Destroy();
2793 }
2794 vmaDestroyPool(g_hAllocator, pool);
2795}
2796
Adam Sawickib8333fb2018-03-13 16:15:53 +01002797void TestHeapSizeLimit()
2798{
Adam Sawickifbaccff2020-03-09 17:09:23 +01002799 const VkDeviceSize HEAP_SIZE_LIMIT = 100ull * 1024 * 1024; // 100 MB
2800 const VkDeviceSize BLOCK_SIZE = 10ull * 1024 * 1024; // 10 MB
Adam Sawickib8333fb2018-03-13 16:15:53 +01002801
2802 VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
2803 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
2804 {
2805 heapSizeLimit[i] = HEAP_SIZE_LIMIT;
2806 }
2807
2808 VmaAllocatorCreateInfo allocatorCreateInfo = {};
2809 allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
2810 allocatorCreateInfo.device = g_hDevice;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002811 allocatorCreateInfo.instance = g_hVulkanInstance;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002812 allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
2813
2814 VmaAllocator hAllocator;
2815 VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002816 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002817
2818 struct Item
2819 {
2820 VkBuffer hBuf;
2821 VmaAllocation hAlloc;
2822 };
2823 std::vector<Item> items;
2824
2825 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2826 bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2827
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002828 // 1. Allocate two blocks of dedicated memory, half the size of BLOCK_SIZE.
2829 VmaAllocationInfo dedicatedAllocInfo;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002830 {
2831 VmaAllocationCreateInfo allocCreateInfo = {};
2832 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2833 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
2834
2835 bufCreateInfo.size = BLOCK_SIZE / 2;
2836
2837 for(size_t i = 0; i < 2; ++i)
2838 {
2839 Item item;
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002840 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002841 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002842 items.push_back(item);
2843 }
2844 }
2845
2846 // Create pool to make sure allocations must be out of this memory type.
2847 VmaPoolCreateInfo poolCreateInfo = {};
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01002848 poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
Adam Sawickib8333fb2018-03-13 16:15:53 +01002849 poolCreateInfo.blockSize = BLOCK_SIZE;
2850
2851 VmaPool hPool;
2852 res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002853 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002854
2855 // 2. Allocate normal buffers from all the remaining memory.
2856 {
2857 VmaAllocationCreateInfo allocCreateInfo = {};
2858 allocCreateInfo.pool = hPool;
2859
2860 bufCreateInfo.size = BLOCK_SIZE / 2;
2861
2862 const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
2863 for(size_t i = 0; i < bufCount; ++i)
2864 {
2865 Item item;
2866 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002867 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002868 items.push_back(item);
2869 }
2870 }
2871
2872 // 3. Allocation of one more (even small) buffer should fail.
2873 {
2874 VmaAllocationCreateInfo allocCreateInfo = {};
2875 allocCreateInfo.pool = hPool;
2876
2877 bufCreateInfo.size = 128;
2878
2879 VkBuffer hBuf;
2880 VmaAllocation hAlloc;
2881 res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002882 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01002883 }
2884
2885 // Destroy everything.
2886 for(size_t i = items.size(); i--; )
2887 {
2888 vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
2889 }
2890
2891 vmaDestroyPool(hAllocator, hPool);
2892
2893 vmaDestroyAllocator(hAllocator);
2894}
2895
#if VMA_DEBUG_MARGIN
// Verifies that when VMA_DEBUG_MARGIN is enabled, the configured margin is
// preserved before every allocation (including the first one in a block) and
// between neighboring allocations, and that vmaCheckCorruption passes.
static void TestDebugMargin()
{
    if(VMA_DEBUG_MARGIN == 0)
    {
        return;
    }

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // Create few buffers of different size.
    const size_t BUF_COUNT = 10;
    BufferInfo buffers[BUF_COUNT];
    VmaAllocationInfo allocInfo[BUF_COUNT];
    // Fixed: loop bound was a hard-coded literal 10; use BUF_COUNT so the
    // arrays and the loop cannot get out of sync.
    for(size_t i = 0; i < BUF_COUNT; ++i)
    {
        bufInfo.size = (VkDeviceSize)(i + 1) * 64;
        // Last one will be mapped.
        allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
        TEST(res == VK_SUCCESS);
        // Margin is preserved also at the beginning of a block.
        TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);

        if(i == BUF_COUNT - 1)
        {
            // Fill with data.
            TEST(allocInfo[i].pMappedData != nullptr);
            // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
            memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
        }
    }

    // Check if their offsets preserve margin between them.
    // Sort by (deviceMemory, offset) so neighbors in the array are neighbors in memory.
    std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
    {
        if(lhs.deviceMemory != rhs.deviceMemory)
        {
            return lhs.deviceMemory < rhs.deviceMemory;
        }
        return lhs.offset < rhs.offset;
    });
    for(size_t i = 1; i < BUF_COUNT; ++i)
    {
        if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
        {
            TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
        }
    }

    VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
    TEST(res == VK_SUCCESS);

    // Destroy all buffers.
    for(size_t i = BUF_COUNT; i--; )
    {
        vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
    }
}
#endif
Adam Sawicki73b16652018-06-11 16:39:25 +02002961
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002962static void TestLinearAllocator()
2963{
2964 wprintf(L"Test linear allocator\n");
2965
2966 RandomNumberGenerator rand{645332};
2967
2968 VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
2969 sampleBufCreateInfo.size = 1024; // Whatever.
2970 sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
2971
2972 VmaAllocationCreateInfo sampleAllocCreateInfo = {};
2973 sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
2974
2975 VmaPoolCreateInfo poolCreateInfo = {};
2976 VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002977 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002978
Adam Sawickiee082772018-06-20 17:45:49 +02002979 poolCreateInfo.blockSize = 1024 * 300;
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002980 poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
2981 poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
2982
2983 VmaPool pool = nullptr;
2984 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02002985 TEST(res == VK_SUCCESS);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02002986
2987 VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
2988
2989 VmaAllocationCreateInfo allocCreateInfo = {};
2990 allocCreateInfo.pool = pool;
2991
2992 constexpr size_t maxBufCount = 100;
2993 std::vector<BufferInfo> bufInfo;
2994
2995 constexpr VkDeviceSize bufSizeMin = 16;
2996 constexpr VkDeviceSize bufSizeMax = 1024;
2997 VmaAllocationInfo allocInfo;
2998 VkDeviceSize prevOffset = 0;
2999
3000 // Test one-time free.
3001 for(size_t i = 0; i < 2; ++i)
3002 {
3003 // Allocate number of buffers of varying size that surely fit into this block.
3004 VkDeviceSize bufSumSize = 0;
3005 for(size_t i = 0; i < maxBufCount; ++i)
3006 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003007 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003008 BufferInfo newBufInfo;
3009 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3010 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003011 TEST(res == VK_SUCCESS);
3012 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003013 bufInfo.push_back(newBufInfo);
3014 prevOffset = allocInfo.offset;
3015 bufSumSize += bufCreateInfo.size;
3016 }
3017
3018 // Validate pool stats.
3019 VmaPoolStats stats;
3020 vmaGetPoolStats(g_hAllocator, pool, &stats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003021 TEST(stats.size == poolCreateInfo.blockSize);
3022 TEST(stats.unusedSize = poolCreateInfo.blockSize - bufSumSize);
3023 TEST(stats.allocationCount == bufInfo.size());
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003024
3025 // Destroy the buffers in random order.
3026 while(!bufInfo.empty())
3027 {
3028 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3029 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3030 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3031 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3032 }
3033 }
3034
3035 // Test stack.
3036 {
3037 // Allocate number of buffers of varying size that surely fit into this block.
3038 for(size_t i = 0; i < maxBufCount; ++i)
3039 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003040 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003041 BufferInfo newBufInfo;
3042 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3043 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003044 TEST(res == VK_SUCCESS);
3045 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003046 bufInfo.push_back(newBufInfo);
3047 prevOffset = allocInfo.offset;
3048 }
3049
3050 // Destroy few buffers from top of the stack.
3051 for(size_t i = 0; i < maxBufCount / 5; ++i)
3052 {
3053 const BufferInfo& currBufInfo = bufInfo.back();
3054 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3055 bufInfo.pop_back();
3056 }
3057
3058 // Create some more
3059 for(size_t i = 0; i < maxBufCount / 5; ++i)
3060 {
Adam Sawickifd366b62019-01-24 15:26:43 +01003061 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003062 BufferInfo newBufInfo;
3063 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3064 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003065 TEST(res == VK_SUCCESS);
3066 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawicki0876c0d2018-06-20 15:18:11 +02003067 bufInfo.push_back(newBufInfo);
3068 prevOffset = allocInfo.offset;
3069 }
3070
3071 // Destroy the buffers in reverse order.
3072 while(!bufInfo.empty())
3073 {
3074 const BufferInfo& currBufInfo = bufInfo.back();
3075 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3076 bufInfo.pop_back();
3077 }
3078 }
3079
Adam Sawickiee082772018-06-20 17:45:49 +02003080 // Test ring buffer.
3081 {
3082 // Allocate number of buffers that surely fit into this block.
3083 bufCreateInfo.size = bufSizeMax;
3084 for(size_t i = 0; i < maxBufCount; ++i)
3085 {
3086 BufferInfo newBufInfo;
3087 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3088 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003089 TEST(res == VK_SUCCESS);
3090 TEST(i == 0 || allocInfo.offset > prevOffset);
Adam Sawickiee082772018-06-20 17:45:49 +02003091 bufInfo.push_back(newBufInfo);
3092 prevOffset = allocInfo.offset;
3093 }
3094
3095 // Free and allocate new buffers so many times that we make sure we wrap-around at least once.
3096 const size_t buffersPerIter = maxBufCount / 10 - 1;
3097 const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
3098 for(size_t iter = 0; iter < iterCount; ++iter)
3099 {
3100 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3101 {
3102 const BufferInfo& currBufInfo = bufInfo.front();
3103 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3104 bufInfo.erase(bufInfo.begin());
3105 }
3106 for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
3107 {
3108 BufferInfo newBufInfo;
3109 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3110 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003111 TEST(res == VK_SUCCESS);
Adam Sawickiee082772018-06-20 17:45:49 +02003112 bufInfo.push_back(newBufInfo);
3113 }
3114 }
3115
3116 // Allocate buffers until we reach out-of-memory.
3117 uint32_t debugIndex = 0;
3118 while(res == VK_SUCCESS)
3119 {
3120 BufferInfo newBufInfo;
3121 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3122 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3123 if(res == VK_SUCCESS)
3124 {
3125 bufInfo.push_back(newBufInfo);
3126 }
3127 else
3128 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003129 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickiee082772018-06-20 17:45:49 +02003130 }
3131 ++debugIndex;
3132 }
3133
3134 // Destroy the buffers in random order.
3135 while(!bufInfo.empty())
3136 {
3137 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3138 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3139 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3140 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3141 }
3142 }
3143
Adam Sawicki680b2252018-08-22 14:47:32 +02003144 // Test double stack.
3145 {
3146 // Allocate number of buffers of varying size that surely fit into this block, alternate from bottom/top.
3147 VkDeviceSize prevOffsetLower = 0;
3148 VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
3149 for(size_t i = 0; i < maxBufCount; ++i)
3150 {
3151 const bool upperAddress = (i % 2) != 0;
3152 if(upperAddress)
3153 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3154 else
3155 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003156 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003157 BufferInfo newBufInfo;
3158 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3159 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003160 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003161 if(upperAddress)
3162 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003163 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003164 prevOffsetUpper = allocInfo.offset;
3165 }
3166 else
3167 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003168 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003169 prevOffsetLower = allocInfo.offset;
3170 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003171 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003172 bufInfo.push_back(newBufInfo);
3173 }
3174
3175 // Destroy few buffers from top of the stack.
3176 for(size_t i = 0; i < maxBufCount / 5; ++i)
3177 {
3178 const BufferInfo& currBufInfo = bufInfo.back();
3179 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3180 bufInfo.pop_back();
3181 }
3182
3183 // Create some more
3184 for(size_t i = 0; i < maxBufCount / 5; ++i)
3185 {
3186 const bool upperAddress = (i % 2) != 0;
3187 if(upperAddress)
3188 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3189 else
3190 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003191 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003192 BufferInfo newBufInfo;
3193 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3194 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003195 TEST(res == VK_SUCCESS);
Adam Sawicki680b2252018-08-22 14:47:32 +02003196 bufInfo.push_back(newBufInfo);
3197 }
3198
3199 // Destroy the buffers in reverse order.
3200 while(!bufInfo.empty())
3201 {
3202 const BufferInfo& currBufInfo = bufInfo.back();
3203 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3204 bufInfo.pop_back();
3205 }
3206
3207 // Create buffers on both sides until we reach out of memory.
3208 prevOffsetLower = 0;
3209 prevOffsetUpper = poolCreateInfo.blockSize;
3210 res = VK_SUCCESS;
3211 for(size_t i = 0; res == VK_SUCCESS; ++i)
3212 {
3213 const bool upperAddress = (i % 2) != 0;
3214 if(upperAddress)
3215 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3216 else
3217 allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
Adam Sawickifd366b62019-01-24 15:26:43 +01003218 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki680b2252018-08-22 14:47:32 +02003219 BufferInfo newBufInfo;
3220 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3221 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3222 if(res == VK_SUCCESS)
3223 {
3224 if(upperAddress)
3225 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003226 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003227 prevOffsetUpper = allocInfo.offset;
3228 }
3229 else
3230 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003231 TEST(allocInfo.offset >= prevOffsetLower);
Adam Sawicki680b2252018-08-22 14:47:32 +02003232 prevOffsetLower = allocInfo.offset;
3233 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003234 TEST(prevOffsetLower < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003235 bufInfo.push_back(newBufInfo);
3236 }
3237 }
3238
3239 // Destroy the buffers in random order.
3240 while(!bufInfo.empty())
3241 {
3242 const size_t indexToDestroy = rand.Generate() % bufInfo.size();
3243 const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
3244 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3245 bufInfo.erase(bufInfo.begin() + indexToDestroy);
3246 }
3247
3248 // Create buffers on upper side only, constant size, until we reach out of memory.
3249 prevOffsetUpper = poolCreateInfo.blockSize;
3250 res = VK_SUCCESS;
3251 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
3252 bufCreateInfo.size = bufSizeMax;
3253 for(size_t i = 0; res == VK_SUCCESS; ++i)
3254 {
3255 BufferInfo newBufInfo;
3256 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3257 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3258 if(res == VK_SUCCESS)
3259 {
Adam Sawickib8d34d52018-10-03 17:41:20 +02003260 TEST(allocInfo.offset < prevOffsetUpper);
Adam Sawicki680b2252018-08-22 14:47:32 +02003261 prevOffsetUpper = allocInfo.offset;
3262 bufInfo.push_back(newBufInfo);
3263 }
3264 }
3265
3266 // Destroy the buffers in reverse order.
3267 while(!bufInfo.empty())
3268 {
3269 const BufferInfo& currBufInfo = bufInfo.back();
3270 vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
3271 bufInfo.pop_back();
3272 }
3273 }
3274
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003275 // Test ring buffer with lost allocations.
3276 {
3277 // Allocate number of buffers until pool is full.
3278 // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
3279 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
3280 res = VK_SUCCESS;
3281 for(size_t i = 0; res == VK_SUCCESS; ++i)
3282 {
3283 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3284
Adam Sawickifd366b62019-01-24 15:26:43 +01003285 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003286
3287 BufferInfo newBufInfo;
3288 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3289 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3290 if(res == VK_SUCCESS)
3291 bufInfo.push_back(newBufInfo);
3292 }
3293
3294 // Free first half of it.
3295 {
3296 const size_t buffersToDelete = bufInfo.size() / 2;
3297 for(size_t i = 0; i < buffersToDelete; ++i)
3298 {
3299 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3300 }
3301 bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
3302 }
3303
3304 // Allocate number of buffers until pool is full again.
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003305 // This way we make sure ring buffers wraps around, front in in the middle.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003306 res = VK_SUCCESS;
3307 for(size_t i = 0; res == VK_SUCCESS; ++i)
3308 {
3309 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3310
Adam Sawickifd366b62019-01-24 15:26:43 +01003311 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003312
3313 BufferInfo newBufInfo;
3314 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3315 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
3316 if(res == VK_SUCCESS)
3317 bufInfo.push_back(newBufInfo);
3318 }
3319
3320 VkDeviceSize firstNewOffset;
3321 {
3322 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3323
3324 // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
3325 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
3326 bufCreateInfo.size = bufSizeMax;
3327
3328 BufferInfo newBufInfo;
3329 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3330 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003331 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003332 bufInfo.push_back(newBufInfo);
3333 firstNewOffset = allocInfo.offset;
3334
3335 // Make sure at least one buffer from the beginning became lost.
3336 vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003337 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003338 }
3339
Adam Sawickifd366b62019-01-24 15:26:43 +01003340#if 0 // TODO Fix and uncomment. Failing on Intel.
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003341 // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
3342 size_t newCount = 1;
3343 for(;;)
3344 {
3345 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3346
Adam Sawickifd366b62019-01-24 15:26:43 +01003347 bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003348
3349 BufferInfo newBufInfo;
3350 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3351 &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
Adam Sawickifd366b62019-01-24 15:26:43 +01003352
Adam Sawickib8d34d52018-10-03 17:41:20 +02003353 TEST(res == VK_SUCCESS);
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003354 bufInfo.push_back(newBufInfo);
3355 ++newCount;
3356 if(allocInfo.offset < firstNewOffset)
3357 break;
3358 }
Adam Sawickifd366b62019-01-24 15:26:43 +01003359#endif
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003360
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003361 // Delete buffers that are lost.
3362 for(size_t i = bufInfo.size(); i--; )
3363 {
3364 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3365 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3366 {
3367 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3368 bufInfo.erase(bufInfo.begin() + i);
3369 }
3370 }
3371
3372 // Test vmaMakePoolAllocationsLost
3373 {
3374 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
3375
Adam Sawicki4d35a5d2019-01-24 15:51:59 +01003376 size_t lostAllocCount = 0;
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003377 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003378 TEST(lostAllocCount > 0);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003379
3380 size_t realLostAllocCount = 0;
3381 for(size_t i = 0; i < bufInfo.size(); ++i)
3382 {
3383 vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
3384 if(allocInfo.deviceMemory == VK_NULL_HANDLE)
3385 ++realLostAllocCount;
3386 }
Adam Sawickib8d34d52018-10-03 17:41:20 +02003387 TEST(realLostAllocCount == lostAllocCount);
Adam Sawicki0ebdf0c2018-08-22 17:02:44 +02003388 }
3389
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02003390 // Destroy all the buffers in forward order.
3391 for(size_t i = 0; i < bufInfo.size(); ++i)
3392 vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
3393 bufInfo.clear();
3394 }
3395
Adam Sawicki70a683e2018-08-24 15:36:32 +02003396 vmaDestroyPool(g_hAllocator, pool);
3397}
Adam Sawickif799c4f2018-08-23 10:40:30 +02003398
// Tests the linear allocation algorithm (VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
// in a pool that is allowed to grow beyond one VkDeviceMemory block.
// Verifies that a second block is created when the first fills up, and that
// blocks which become empty are released back (block count drops again).
static void TestLinearAllocatorMultiBlock()
{
    wprintf(L"Test linear allocator multi block\n");

    RandomNumberGenerator rand{345673};

    // Sample buffer used both to pick the memory type index and as the
    // template for every test allocation: 1 MB, host-visible (CPU_ONLY).
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024 * 1024;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    // blockSize intentionally left at default (0) so the pool may create
    // multiple blocks - that is exactly what this test exercises.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;

    // Test one-time free.
    {
        // Allocate buffers until we move to a second block.
        // The transition is detected by a change of VkDeviceMemory handle
        // between consecutive allocations.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Destroy all the buffers in random order.
        while(!bufInfo.empty())
        {
            const size_t indexToDestroy = rand.Generate() % bufInfo.size();
            const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.erase(bufInfo.begin() + indexToDestroy);
        }

        // Make sure that pool has now at most one block.
        // Blocks emptied by the frees above should have been released.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount <= 1);
    }

    // Test stack.
    {
        // Allocate buffers until we move to a second block.
        VkDeviceMemory lastMem = VK_NULL_HANDLE;
        for(uint32_t i = 0; ; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
            if(lastMem && allocInfo.deviceMemory != lastMem)
            {
                break;
            }
            lastMem = allocInfo.deviceMemory;
        }

        TEST(bufInfo.size() > 2);

        // Add few more buffers.
        for(uint32_t i = 0; i < 5; ++i)
        {
            BufferInfo newBufInfo;
            res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
                &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            bufInfo.push_back(newBufInfo);
        }

        // Make sure that pool has now two blocks.
        VmaPoolStats poolStats = {};
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 2);

        // Delete half of buffers, LIFO.
        for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }

        // Add one more buffer.
        BufferInfo newBufInfo;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Make sure that pool has now one block.
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);
        TEST(poolStats.blockCount == 1);

        // Delete all the remaining buffers, LIFO.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3533
// Manual/interactive test of the linear allocator used as a double stack:
// allocations are made from the lower end and, with
// VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, from the upper end of a single
// fixed-size 10 KB block. Intended to be run under a debugger - there is a
// marked breakpoint spot to inspect statistics and the JSON stats string.
static void ManuallyTestLinearAllocator()
{
    // Snapshot of global stats taken before the test, for manual comparison
    // against currStats at the breakpoint below.
    VmaStats origStats;
    vmaCalculateStats(g_hAllocator, &origStats);

    wprintf(L"Manually test linear allocator\n");

    RandomNumberGenerator rand{645332};

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Exactly one fixed-size block so the double-stack layout is predictable.
    poolCreateInfo.blockSize = 10 * 1024;
    poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    VmaAllocationInfo allocInfo;
    BufferInfo newBufInfo;

    // Test double stack.
    {
        /*
        Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
        Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B

        Totally:
        1 block allocated
        10240 Vulkan bytes
        6 new allocations
        2256 bytes in allocations
        */

        // Lower stack: 32 B, 1024 B, 32 B.
        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 32;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Switch to the upper end of the block: 128 B, 1024 B, 16 B.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;

        bufCreateInfo.size = 128;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 1024;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        bufCreateInfo.size = 16;
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);

        // Gather statistics to be inspected manually.
        VmaStats currStats;
        vmaCalculateStats(g_hAllocator, &currStats);
        VmaPoolStats poolStats;
        vmaGetPoolStats(g_hAllocator, pool, &poolStats);

        char* statsStr = nullptr;
        vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);

        // PUT BREAKPOINT HERE TO CHECK.
        // Inspect: currStats versus origStats, poolStats, statsStr.
        // 'I' exists only as a statement to attach the breakpoint to.
        int I = 0;

        vmaFreeStatsString(g_hAllocator, statsStr);

        // Destroy the buffers in reverse order.
        while(!bufInfo.empty())
        {
            const BufferInfo& currBufInfo = bufInfo.back();
            vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
            bufInfo.pop_back();
        }
    }

    vmaDestroyPool(g_hAllocator, pool);
}
3647
// Benchmarks one configuration of the allocator.
// Parameters:
//   file          - optional CSV output; a data row is appended when non-null.
//   algorithm     - VmaPoolCreateFlags algorithm bit (0 = default/TLSF-style,
//                   buddy, or linear), OR-ed into the pool flags.
//   empty         - if false, the pool is pre-populated to ~1/3 capacity and
//                   half of those allocations are freed at random, so the
//                   benchmark runs against a fragmented pool.
//   allocStrategy - VMA_ALLOCATION_CREATE_STRATEGY_* flags to benchmark.
//   freeOrder     - order in which each iteration frees its allocations.
// Results (total allocation and free time over all iterations) are printed
// to stdout and optionally appended to 'file'.
static void BenchmarkAlgorithmsCase(FILE* file,
    uint32_t algorithm,
    bool empty,
    VmaAllocationCreateFlags allocStrategy,
    FREE_ORDER freeOrder)
{
    RandomNumberGenerator rand{16223};

    const VkDeviceSize bufSizeMin = 32;
    const VkDeviceSize bufSizeMax = 1024;
    const size_t maxBufCapacity = 10000;
    const uint32_t iterationCount = 10;

    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = bufSizeMax;
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Single block big enough for maxBufCapacity maximum-sized buffers, so
    // the benchmark never measures block creation.
    poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
    poolCreateInfo.flags |= algorithm;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Buffer created just to get memory requirements. Never bound to any memory.
    VkBuffer dummyBuffer = VK_NULL_HANDLE;
    res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
    TEST(res == VK_SUCCESS && dummyBuffer);

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);

    vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    allocCreateInfo.flags = allocStrategy;

    VmaAllocation alloc;
    std::vector<VmaAllocation> baseAllocations;

    if(!empty)
    {
        // Make allocations up to 1/3 of pool size.
        VkDeviceSize totalSize = 0;
        while(totalSize < poolCreateInfo.blockSize / 3)
        {
            // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
            // This is theoretically allowed and already uncovered one bug.
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            baseAllocations.push_back(alloc);
            totalSize += memReq.size;
        }

        // Delete half of them, choose randomly, to create fragmentation.
        size_t allocsToDelete = baseAllocations.size() / 2;
        for(size_t i = 0; i < allocsToDelete; ++i)
        {
            const size_t index = (size_t)rand.Generate() % baseAllocations.size();
            vmaFreeMemory(g_hAllocator, baseAllocations[index]);
            baseAllocations.erase(baseAllocations.begin() + index);
        }
    }

    // BENCHMARK
    const size_t allocCount = maxBufCapacity / 3;
    std::vector<VmaAllocation> testAllocations;
    testAllocations.reserve(allocCount);
    duration allocTotalDuration = duration::zero();
    duration freeTotalDuration = duration::zero();
    for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
    {
        // Allocations - only vmaAllocateMemory is inside the timed region.
        time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
        {
            memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
            res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
            TEST(res == VK_SUCCESS);
            testAllocations.push_back(alloc);
        }
        allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;

        // Deallocations - reordering happens outside the timed region.
        switch(freeOrder)
        {
        case FREE_ORDER::FORWARD:
            // Leave testAllocations unchanged.
            break;
        case FREE_ORDER::BACKWARD:
            std::reverse(testAllocations.begin(), testAllocations.end());
            break;
        case FREE_ORDER::RANDOM:
            std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
            break;
        default: assert(0);
        }

        time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
        for(size_t i = 0; i < allocCount; ++i)
            vmaFreeMemory(g_hAllocator, testAllocations[i]);
        freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;

        testAllocations.clear();
    }

    // Delete baseAllocations
    while(!baseAllocations.empty())
    {
        vmaFreeMemory(g_hAllocator, baseAllocations.back());
        baseAllocations.pop_back();
    }

    vmaDestroyPool(g_hAllocator, pool);

    const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
    const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);

    printf("    Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
        AlgorithmToStr(algorithm),
        empty ? "Empty" : "Not empty",
        GetAllocationStrategyName(allocStrategy),
        FREE_ORDER_NAMES[(size_t)freeOrder],
        allocTotalSeconds,
        freeTotalSeconds);

    if(file)
    {
        // CSV row; columns match the header written by BenchmarkAlgorithms.
        std::string currTime;
        CurrentTimeToStr(currTime);

        fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
            CODE_DESCRIPTION, currTime.c_str(),
            AlgorithmToStr(algorithm),
            empty ? 1 : 0,
            GetAllocationStrategyName(allocStrategy),
            FREE_ORDER_NAMES[(uint32_t)freeOrder],
            allocTotalSeconds,
            freeTotalSeconds);
    }
}
3799
Adam Sawickie73e9882020-03-20 18:05:42 +01003800static void TestBufferDeviceAddress()
3801{
3802 wprintf(L"Test buffer device address\n");
3803
3804 assert(g_BufferDeviceAddressEnabled);
3805
3806 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3807 bufCreateInfo.size = 0x10000;
3808 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
3809 VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT; // !!!
3810
3811 VmaAllocationCreateInfo allocCreateInfo = {};
3812 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3813
3814 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
3815 {
3816 // 1st is placed, 2nd is dedicated.
3817 if(testIndex == 1)
3818 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3819
3820 BufferInfo bufInfo = {};
3821 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3822 &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
3823 TEST(res == VK_SUCCESS);
3824
3825 VkBufferDeviceAddressInfoEXT bufferDeviceAddressInfo = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT };
3826 bufferDeviceAddressInfo.buffer = bufInfo.Buffer;
3827 //assert(g_vkGetBufferDeviceAddressEXT != nullptr);
3828 if(g_vkGetBufferDeviceAddressEXT != nullptr)
3829 {
3830 VkDeviceAddress addr = g_vkGetBufferDeviceAddressEXT(g_hDevice, &bufferDeviceAddressInfo);
3831 TEST(addr != 0);
3832 }
3833
3834 vmaDestroyBuffer(g_hAllocator, bufInfo.Buffer, bufInfo.Allocation);
3835 }
3836}
3837
Adam Sawickif2012052021-01-11 18:04:42 +01003838static void TestMemoryPriority()
3839{
3840 wprintf(L"Test memory priority\n");
3841
3842 assert(VK_EXT_memory_priority_enabled);
3843
3844 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3845 bufCreateInfo.size = 0x10000;
3846 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
3847
3848 VmaAllocationCreateInfo allocCreateInfo = {};
3849 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
3850 allocCreateInfo.priority = 1.f;
3851
3852 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
3853 {
3854 // 1st is placed, 2nd is dedicated.
3855 if(testIndex == 1)
3856 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
3857
3858 BufferInfo bufInfo = {};
3859 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
3860 &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
3861 TEST(res == VK_SUCCESS);
3862
3863 // There is nothing we can do to validate the priority.
3864
3865 vmaDestroyBuffer(g_hAllocator, bufInfo.Buffer, bufInfo.Allocation);
3866 }
3867}
3868
Adam Sawicki80927152018-09-07 17:27:23 +02003869static void BenchmarkAlgorithms(FILE* file)
Adam Sawicki0a607132018-08-24 11:18:41 +02003870{
Adam Sawicki80927152018-09-07 17:27:23 +02003871 wprintf(L"Benchmark algorithms\n");
Adam Sawicki0a607132018-08-24 11:18:41 +02003872
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003873 if(file)
3874 {
3875 fprintf(file,
3876 "Code,Time,"
Adam Sawicki80927152018-09-07 17:27:23 +02003877 "Algorithm,Empty,Allocation strategy,Free order,"
Adam Sawicki33d2ce72018-08-27 13:59:13 +02003878 "Allocation time (s),Deallocation time (s)\n");
3879 }
3880
Adam Sawicki0a607132018-08-24 11:18:41 +02003881 uint32_t freeOrderCount = 1;
3882 if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
3883 freeOrderCount = 3;
3884 else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
3885 freeOrderCount = 2;
3886
3887 const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003888 const uint32_t allocStrategyCount = GetAllocationStrategyCount();
Adam Sawicki0a607132018-08-24 11:18:41 +02003889
3890 for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
3891 {
3892 FREE_ORDER freeOrder = FREE_ORDER::COUNT;
3893 switch(freeOrderIndex)
3894 {
3895 case 0: freeOrder = FREE_ORDER::BACKWARD; break;
3896 case 1: freeOrder = FREE_ORDER::FORWARD; break;
3897 case 2: freeOrder = FREE_ORDER::RANDOM; break;
3898 default: assert(0);
3899 }
3900
3901 for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
3902 {
Adam Sawicki80927152018-09-07 17:27:23 +02003903 for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
Adam Sawicki0a607132018-08-24 11:18:41 +02003904 {
Adam Sawicki80927152018-09-07 17:27:23 +02003905 uint32_t algorithm = 0;
3906 switch(algorithmIndex)
3907 {
3908 case 0:
3909 break;
3910 case 1:
3911 algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
3912 break;
3913 case 2:
3914 algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
3915 break;
3916 default:
3917 assert(0);
3918 }
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003919
Adam Sawicki80927152018-09-07 17:27:23 +02003920 uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003921 for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
3922 {
3923 VmaAllocatorCreateFlags strategy = 0;
Adam Sawicki80927152018-09-07 17:27:23 +02003924 if(currAllocStrategyCount > 1)
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003925 {
3926 switch(allocStrategyIndex)
3927 {
3928 case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
3929 case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
3930 case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
3931 default: assert(0);
3932 }
3933 }
3934
Adam Sawicki80927152018-09-07 17:27:23 +02003935 BenchmarkAlgorithmsCase(
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003936 file,
Adam Sawicki80927152018-09-07 17:27:23 +02003937 algorithm,
Adam Sawicki1f7f8af2018-10-03 17:37:55 +02003938 (emptyIndex == 0), // empty
Adam Sawicki0a3fb6c2018-08-27 14:40:27 +02003939 strategy,
3940 freeOrder); // freeOrder
3941 }
Adam Sawicki0a607132018-08-24 11:18:41 +02003942 }
3943 }
3944 }
3945}
3946
Adam Sawickib8333fb2018-03-13 16:15:53 +01003947static void TestPool_SameSize()
3948{
3949 const VkDeviceSize BUF_SIZE = 1024 * 1024;
3950 const size_t BUF_COUNT = 100;
3951 VkResult res;
3952
3953 RandomNumberGenerator rand{123};
3954
3955 VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
3956 bufferInfo.size = BUF_SIZE;
3957 bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3958
3959 uint32_t memoryTypeBits = UINT32_MAX;
3960 {
3961 VkBuffer dummyBuffer;
Adam Sawicki1f84f622019-07-02 13:40:01 +02003962 res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003963 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003964
3965 VkMemoryRequirements memReq;
3966 vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
3967 memoryTypeBits = memReq.memoryTypeBits;
3968
Adam Sawicki1f84f622019-07-02 13:40:01 +02003969 vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003970 }
3971
3972 VmaAllocationCreateInfo poolAllocInfo = {};
3973 poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
3974 uint32_t memTypeIndex;
3975 res = vmaFindMemoryTypeIndex(
3976 g_hAllocator,
3977 memoryTypeBits,
3978 &poolAllocInfo,
3979 &memTypeIndex);
3980
3981 VmaPoolCreateInfo poolCreateInfo = {};
3982 poolCreateInfo.memoryTypeIndex = memTypeIndex;
3983 poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
3984 poolCreateInfo.minBlockCount = 1;
3985 poolCreateInfo.maxBlockCount = 4;
3986 poolCreateInfo.frameInUseCount = 0;
3987
3988 VmaPool pool;
3989 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02003990 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01003991
Adam Sawickia020fb82019-11-02 14:43:06 +01003992 // Test pool name
3993 {
3994 static const char* const POOL_NAME = "Pool name";
3995 vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
3996
3997 const char* fetchedPoolName = nullptr;
3998 vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
3999 TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
4000
Adam Sawickia020fb82019-11-02 14:43:06 +01004001 vmaSetPoolName(g_hAllocator, pool, nullptr);
4002 }
4003
Adam Sawickib8333fb2018-03-13 16:15:53 +01004004 vmaSetCurrentFrameIndex(g_hAllocator, 1);
4005
4006 VmaAllocationCreateInfo allocInfo = {};
4007 allocInfo.pool = pool;
4008 allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
4009 VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
4010
4011 struct BufItem
4012 {
4013 VkBuffer Buf;
4014 VmaAllocation Alloc;
4015 };
4016 std::vector<BufItem> items;
4017
4018 // Fill entire pool.
4019 for(size_t i = 0; i < BUF_COUNT; ++i)
4020 {
4021 BufItem item;
4022 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004023 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004024 items.push_back(item);
4025 }
4026
4027 // Make sure that another allocation would fail.
4028 {
4029 BufItem item;
4030 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004031 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004032 }
4033
4034 // Validate that no buffer is lost. Also check that they are not mapped.
4035 for(size_t i = 0; i < items.size(); ++i)
4036 {
4037 VmaAllocationInfo allocInfo;
4038 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004039 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
4040 TEST(allocInfo.pMappedData == nullptr);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004041 }
4042
4043 // Free some percent of random items.
4044 {
4045 const size_t PERCENT_TO_FREE = 10;
4046 size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
4047 for(size_t i = 0; i < itemsToFree; ++i)
4048 {
4049 size_t index = (size_t)rand.Generate() % items.size();
4050 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
4051 items.erase(items.begin() + index);
4052 }
4053 }
4054
4055 // Randomly allocate and free items.
4056 {
4057 const size_t OPERATION_COUNT = BUF_COUNT;
4058 for(size_t i = 0; i < OPERATION_COUNT; ++i)
4059 {
4060 bool allocate = rand.Generate() % 2 != 0;
4061 if(allocate)
4062 {
4063 if(items.size() < BUF_COUNT)
4064 {
4065 BufItem item;
4066 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004067 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004068 items.push_back(item);
4069 }
4070 }
4071 else // Free
4072 {
4073 if(!items.empty())
4074 {
4075 size_t index = (size_t)rand.Generate() % items.size();
4076 vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
4077 items.erase(items.begin() + index);
4078 }
4079 }
4080 }
4081 }
4082
4083 // Allocate up to maximum.
4084 while(items.size() < BUF_COUNT)
4085 {
4086 BufItem item;
4087 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004088 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004089 items.push_back(item);
4090 }
4091
4092 // Validate that no buffer is lost.
4093 for(size_t i = 0; i < items.size(); ++i)
4094 {
4095 VmaAllocationInfo allocInfo;
4096 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004097 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004098 }
4099
4100 // Next frame.
4101 vmaSetCurrentFrameIndex(g_hAllocator, 2);
4102
4103 // Allocate another BUF_COUNT buffers.
4104 for(size_t i = 0; i < BUF_COUNT; ++i)
4105 {
4106 BufItem item;
4107 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004108 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004109 items.push_back(item);
4110 }
4111
4112 // Make sure the first BUF_COUNT is lost. Delete them.
4113 for(size_t i = 0; i < BUF_COUNT; ++i)
4114 {
4115 VmaAllocationInfo allocInfo;
4116 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004117 TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004118 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4119 }
4120 items.erase(items.begin(), items.begin() + BUF_COUNT);
4121
4122 // Validate that no buffer is lost.
4123 for(size_t i = 0; i < items.size(); ++i)
4124 {
4125 VmaAllocationInfo allocInfo;
4126 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004127 TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004128 }
4129
4130 // Free one item.
4131 vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
4132 items.pop_back();
4133
4134 // Validate statistics.
4135 {
4136 VmaPoolStats poolStats = {};
4137 vmaGetPoolStats(g_hAllocator, pool, &poolStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004138 TEST(poolStats.allocationCount == items.size());
4139 TEST(poolStats.size = BUF_COUNT * BUF_SIZE);
4140 TEST(poolStats.unusedRangeCount == 1);
4141 TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
4142 TEST(poolStats.unusedSize == BUF_SIZE);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004143 }
4144
4145 // Free all remaining items.
4146 for(size_t i = items.size(); i--; )
4147 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4148 items.clear();
4149
4150 // Allocate maximum items again.
4151 for(size_t i = 0; i < BUF_COUNT; ++i)
4152 {
4153 BufItem item;
4154 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004155 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004156 items.push_back(item);
4157 }
4158
4159 // Delete every other item.
4160 for(size_t i = 0; i < BUF_COUNT / 2; ++i)
4161 {
4162 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4163 items.erase(items.begin() + i);
4164 }
4165
4166 // Defragment!
4167 {
4168 std::vector<VmaAllocation> allocationsToDefragment(items.size());
4169 for(size_t i = 0; i < items.size(); ++i)
4170 allocationsToDefragment[i] = items[i].Alloc;
4171
4172 VmaDefragmentationStats defragmentationStats;
4173 res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004174 TEST(res == VK_SUCCESS);
4175 TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004176 }
4177
4178 // Free all remaining items.
4179 for(size_t i = items.size(); i--; )
4180 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4181 items.clear();
4182
4183 ////////////////////////////////////////////////////////////////////////////////
4184 // Test for vmaMakePoolAllocationsLost
4185
4186 // Allocate 4 buffers on frame 10.
4187 vmaSetCurrentFrameIndex(g_hAllocator, 10);
4188 for(size_t i = 0; i < 4; ++i)
4189 {
4190 BufItem item;
4191 res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004192 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004193 items.push_back(item);
4194 }
4195
4196 // Touch first 2 of them on frame 11.
4197 vmaSetCurrentFrameIndex(g_hAllocator, 11);
4198 for(size_t i = 0; i < 2; ++i)
4199 {
4200 VmaAllocationInfo allocInfo;
4201 vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
4202 }
4203
4204 // vmaMakePoolAllocationsLost. Only remaining 2 should be lost.
4205 size_t lostCount = 0xDEADC0DE;
4206 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004207 TEST(lostCount == 2);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004208
4209 // Make another call. Now 0 should be lost.
4210 vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004211 TEST(lostCount == 0);
Adam Sawickib8333fb2018-03-13 16:15:53 +01004212
4213 // Make another call, with null count. Should not crash.
4214 vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
4215
4216 // END: Free all remaining items.
4217 for(size_t i = items.size(); i--; )
4218 vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
4219
4220 items.clear();
4221
Adam Sawickid2924172018-06-11 12:48:46 +02004222 ////////////////////////////////////////////////////////////////////////////////
4223 // Test for allocation too large for pool
4224
4225 {
4226 VmaAllocationCreateInfo allocCreateInfo = {};
4227 allocCreateInfo.pool = pool;
4228
4229 VkMemoryRequirements memReq;
4230 memReq.memoryTypeBits = UINT32_MAX;
4231 memReq.alignment = 1;
4232 memReq.size = poolCreateInfo.blockSize + 4;
4233
4234 VmaAllocation alloc = nullptr;
4235 res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004236 TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
Adam Sawickid2924172018-06-11 12:48:46 +02004237 }
4238
Adam Sawickib8333fb2018-03-13 16:15:53 +01004239 vmaDestroyPool(g_hAllocator, pool);
4240}
4241
// Returns true if every byte of the given memory region equals `pattern`.
// An empty region (size == 0) trivially validates. Used to verify
// VMA debug fill patterns (0xDC on create, 0xEF on destroy).
static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
{
    const uint8_t* pByte = static_cast<const uint8_t*>(pMemory);
    const uint8_t* const pEnd = pByte + size;
    while(pByte != pEnd)
    {
        if(*pByte != pattern)
            return false;
        ++pByte;
    }
    return true;
}
4254
4255static void TestAllocationsInitialization()
4256{
4257 VkResult res;
4258
4259 const size_t BUF_SIZE = 1024;
4260
4261 // Create pool.
4262
4263 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4264 bufInfo.size = BUF_SIZE;
4265 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4266
4267 VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
4268 dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
4269
4270 VmaPoolCreateInfo poolCreateInfo = {};
4271 poolCreateInfo.blockSize = BUF_SIZE * 10;
4272 poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
4273 poolCreateInfo.maxBlockCount = 1;
4274 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004275 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004276
4277 VmaAllocationCreateInfo bufAllocCreateInfo = {};
4278 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004279 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004280
4281 // Create one persistently mapped buffer to keep memory of this block mapped,
4282 // so that pointer to mapped data will remain (more or less...) valid even
4283 // after destruction of other allocations.
4284
4285 bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
4286 VkBuffer firstBuf;
4287 VmaAllocation firstAlloc;
4288 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004289 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004290
4291 // Test buffers.
4292
4293 for(uint32_t i = 0; i < 2; ++i)
4294 {
4295 const bool persistentlyMapped = i == 0;
4296 bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
4297 VkBuffer buf;
4298 VmaAllocation alloc;
4299 VmaAllocationInfo allocInfo;
4300 res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004301 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004302
4303 void* pMappedData;
4304 if(!persistentlyMapped)
4305 {
4306 res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004307 TEST(res == VK_SUCCESS);
Adam Sawickie44c6262018-06-15 14:30:39 +02004308 }
4309 else
4310 {
4311 pMappedData = allocInfo.pMappedData;
4312 }
4313
4314 // Validate initialized content
4315 bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004316 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02004317
4318 if(!persistentlyMapped)
4319 {
4320 vmaUnmapMemory(g_hAllocator, alloc);
4321 }
4322
4323 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4324
4325 // Validate freed content
4326 valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
Adam Sawickib8d34d52018-10-03 17:41:20 +02004327 TEST(valid);
Adam Sawickie44c6262018-06-15 14:30:39 +02004328 }
4329
4330 vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
4331 vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
4332}
4333
// Stress-benchmark of a single custom VmaPool: config.ThreadCount worker
// threads allocate and free buffers/images (with "can become lost"
// allocations) over config.FrameCount simulated frames. Per-thread timings
// and counters are aggregated into outResult. The measured interval covers
// everything between pool creation and pool destruction.
// Windows-specific: uses Win32 auto-reset events to synchronize frame
// boundaries between the main thread and the workers.
static void TestPool_Benchmark(
    PoolTestResult& outResult,
    const PoolTestConfig& config)
{
    TEST(config.ThreadCount > 0);

    RandomNumberGenerator mainRand{config.RandSeed};

    // Sum of probability weights - used below to pick a random
    // AllocationSize entry proportionally to its Probability.
    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 256; // Whatever.
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = 256; // Whatever.
    imageInfo.extent.height = 256; // Whatever.
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    // Query supported memory type bits for such buffers via a dummy buffer.
    uint32_t bufferMemoryTypeBits = UINT32_MAX;
    {
        VkBuffer dummyBuffer;
        VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
        bufferMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
    }

    // Same query for images, via a dummy image.
    uint32_t imageMemoryTypeBits = UINT32_MAX;
    {
        VkImage dummyImage;
        VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
        TEST(res == VK_SUCCESS);

        VkMemoryRequirements memReq;
        vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
        imageMemoryTypeBits = memReq.memoryTypeBits;

        vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
    }

    // If both resource kinds are tested, they must share one pool, so the
    // intersection of their memory type bits must be non-empty.
    uint32_t memoryTypeBits = 0;
    if(config.UsesBuffers() && config.UsesImages())
    {
        memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
        if(memoryTypeBits == 0)
        {
            PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
            return;
        }
    }
    else if(config.UsesBuffers())
        memoryTypeBits = bufferMemoryTypeBits;
    else if(config.UsesImages())
        memoryTypeBits = imageMemoryTypeBits;
    else
        TEST(0);

    // Single fixed-size block of config.PoolSize bytes.
    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = 0;
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 1;
    poolCreateInfo.blockSize = config.PoolSize;
    poolCreateInfo.frameInUseCount = 1;

    VmaAllocationCreateInfo dummyAllocCreateInfo = {};
    dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);

    VmaPool pool;
    VkResult res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Start time measurement - after creating pool and initializing data structures.
    time_point timeBeg = std::chrono::high_resolution_clock::now();

    ////////////////////////////////////////////////////////////////////////////////
    // ThreadProc
    // Body of each worker thread: each frame it waits for frameStartEvent,
    // reshuffles items between used/unused sets, touches or (re)creates every
    // used item, then signals frameEndEvent.
    // NOTE(review): captures the outer `res` by reference, so workers share it
    // with the main thread; only the TEST()-checked value above is relied on.
    auto ThreadProc = [&](
        PoolTestThreadResult* outThreadResult,
        uint32_t randSeed,
        HANDLE frameStartEvent,
        HANDLE frameEndEvent) -> void
    {
        RandomNumberGenerator threadRand{randSeed};

        // Reset all per-thread statistics.
        outThreadResult->AllocationTimeMin = duration::max();
        outThreadResult->AllocationTimeSum = duration::zero();
        outThreadResult->AllocationTimeMax = duration::min();
        outThreadResult->DeallocationTimeMin = duration::max();
        outThreadResult->DeallocationTimeSum = duration::zero();
        outThreadResult->DeallocationTimeMax = duration::min();
        outThreadResult->AllocationCount = 0;
        outThreadResult->DeallocationCount = 0;
        outThreadResult->LostAllocationCount = 0;
        outThreadResult->LostAllocationTotalSize = 0;
        outThreadResult->FailedAllocationCount = 0;
        outThreadResult->FailedAllocationTotalSize = 0;

        // One potential allocation: either a buffer (BufferSize != 0) or an
        // image (ImageSize != 0), never both.
        struct Item
        {
            VkDeviceSize BufferSize;
            VkExtent2D ImageSize;
            VkBuffer Buf;
            VkImage Image;
            VmaAllocation Alloc;

            // Approximate payload size; assumes 4 bytes per image pixel.
            VkDeviceSize CalcSizeBytes() const
            {
                return BufferSize +
                    ImageSize.width * ImageSize.height * 4;
            }
        };
        std::vector<Item> unusedItems, usedItems;

        const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;

        // Create all items - all unused, not yet allocated.
        for(size_t i = 0; i < threadTotalItemCount; ++i)
        {
            Item item = {};

            // Pick a random AllocationSizes entry, weighted by Probability.
            uint32_t allocSizeIndex = 0;
            uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
            while(r >= config.AllocationSizes[allocSizeIndex].Probability)
                r -= config.AllocationSizes[allocSizeIndex++].Probability;

            const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
            if(allocSize.BufferSizeMax > 0)
            {
                // Buffer entry: image bounds must be unset.
                TEST(allocSize.BufferSizeMin > 0);
                TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
                if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                    item.BufferSize = allocSize.BufferSizeMin;
                else
                {
                    item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                    item.BufferSize = item.BufferSize / 16 * 16; // Round down to a multiple of 16.
                }
            }
            else
            {
                // Image entry: pick random width/height within bounds.
                TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
                if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                    item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
                else
                {
                    item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                    item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                }
            }

            unusedItems.push_back(item);
        }

        // Creates the Vulkan buffer or image for `item` in the benchmarked
        // pool; the RAII timer object records the call's duration into
        // outThreadResult.
        auto Allocate = [&](Item& item) -> VkResult
        {
            VmaAllocationCreateInfo allocCreateInfo = {};
            allocCreateInfo.pool = pool;
            allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
                VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

            if(item.BufferSize)
            {
                bufferInfo.size = item.BufferSize;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
            }
            else
            {
                TEST(item.ImageSize.width && item.ImageSize.height);

                imageInfo.extent.width = item.ImageSize.width;
                imageInfo.extent.height = item.ImageSize.height;
                PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
            }
        };

        ////////////////////////////////////////////////////////////////////////////////
        // Frames
        for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
        {
            WaitForSingleObject(frameStartEvent, INFINITE);

            // Always make some percent of used bufs unused, to choose different used ones.
            const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
            for(size_t i = 0; i < bufsToMakeUnused; ++i)
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }

            // Determine which bufs we want to use in this frame.
            const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
                / config.ThreadCount;
            TEST(usedBufCount < usedItems.size() + unusedItems.size());
            // Move some used to unused.
            while(usedBufCount < usedItems.size())
            {
                size_t index = threadRand.Generate() % usedItems.size();
                unusedItems.push_back(usedItems[index]);
                usedItems.erase(usedItems.begin() + index);
            }
            // Move some unused to used.
            while(usedBufCount > usedItems.size())
            {
                size_t index = threadRand.Generate() % unusedItems.size();
                usedItems.push_back(unusedItems[index]);
                unusedItems.erase(unusedItems.begin() + index);
            }

            uint32_t touchExistingCount = 0;
            uint32_t touchLostCount = 0;
            uint32_t createSucceededCount = 0;
            uint32_t createFailedCount = 0;

            // Touch all used bufs. If not created or lost, allocate.
            for(size_t i = 0; i < usedItems.size(); ++i)
            {
                Item& item = usedItems[i];
                // Not yet created.
                if(item.Alloc == VK_NULL_HANDLE)
                {
                    res = Allocate(item);
                    ++outThreadResult->AllocationCount;
                    if(res != VK_SUCCESS)
                    {
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;
                        ++outThreadResult->FailedAllocationCount;
                        outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                        ++createFailedCount;
                    }
                    else
                        ++createSucceededCount;
                }
                else
                {
                    // Touch.
                    // Querying allocation info marks the allocation as used in
                    // the current frame and reveals whether it was lost.
                    VmaAllocationInfo allocInfo;
                    vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
                    // Lost.
                    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
                    {
                        ++touchLostCount;

                        // Destroy.
                        {
                            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
                            if(item.Buf)
                                vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
                            ++outThreadResult->DeallocationCount;
                        }
                        item.Alloc = VK_NULL_HANDLE;
                        item.Buf = VK_NULL_HANDLE;

                        ++outThreadResult->LostAllocationCount;
                        outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();

                        // Recreate.
                        res = Allocate(item);
                        ++outThreadResult->AllocationCount;
                        // Creation failed.
                        if(res != VK_SUCCESS)
                        {
                            ++outThreadResult->FailedAllocationCount;
                            outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
                            ++createFailedCount;
                        }
                        else
                            ++createSucceededCount;
                    }
                    else
                        ++touchExistingCount;
                }
            }

            /*
            printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
                randSeed, frameIndex,
                touchExistingCount, touchLostCount,
                createSucceededCount, createFailedCount);
            */

            SetEvent(frameEndEvent);
        }

        // Free all remaining items.
        for(size_t i = usedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
            if(usedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
        for(size_t i = unusedItems.size(); i--; )
        {
            PoolDeallocationTimeRegisterObj timeRegisterOb(*outThreadResult);
            if(unusedItems[i].Buf)
                vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
            else
                vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
            ++outThreadResult->DeallocationCount;
        }
    };

    // Launch threads.
    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<HANDLE> frameStartEvents{config.ThreadCount};
    std::vector<HANDLE> frameEndEvents{config.ThreadCount};
    std::vector<std::thread> bkgThreads;
    std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
    for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        // Auto-reset events, initially non-signaled.
        frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
        bkgThreads.emplace_back(std::bind(
            ThreadProc,
            &threadResults[threadIndex],
            threadRandSeed + threadIndex,
            frameStartEvents[threadIndex],
            frameEndEvents[threadIndex]));
    }

    // Execute frames.
    TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
    for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
    {
        vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
        // Release every worker for this frame, then wait until all of them
        // have signaled frame completion.
        for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
            SetEvent(frameStartEvents[threadIndex]);
        WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
    }

    // Wait for threads finished
    for(size_t i = 0; i < bkgThreads.size(); ++i)
    {
        bkgThreads[i].join();
        CloseHandle(frameEndEvents[i]);
        CloseHandle(frameStartEvents[i]);
    }
    bkgThreads.clear();

    // Finish time measurement - before destroying pool.
    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    vmaDestroyPool(g_hAllocator, pool);

    // Aggregate per-thread results: min/max across all threads, averages over
    // the total operation counts.
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.LostAllocationCount = 0;
    outResult.LostAllocationTotalSize = 0;
    outResult.FailedAllocationCount = 0;
    outResult.FailedAllocationTotalSize = 0;
    size_t allocationCount = 0;
    size_t deallocationCount = 0;
    for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
    {
        const PoolTestThreadResult& threadResult = threadResults[threadIndex];
        outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
        outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
        outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
        outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
        outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
        outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
        allocationCount += threadResult.AllocationCount;
        deallocationCount += threadResult.DeallocationCount;
        outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
        outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
        outResult.LostAllocationCount += threadResult.LostAllocationCount;
        outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
    }
    // Guard against division by zero when no operations were performed.
    if(allocationCount)
        outResult.AllocationTimeAvg /= allocationCount;
    if(deallocationCount)
        outResult.DeallocationTimeAvg /= deallocationCount;
}
4739
// Returns true if the half-open regions [ptr1, ptr1+size1) and
// [ptr2, ptr2+size2) overlap. Identical start addresses always count as an
// overlap, even for zero-sized regions.
static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
{
    if(ptr1 == ptr2)
        return true;
    // Otherwise the lower region must extend past the start of the higher one.
    return ptr1 < ptr2 ? (ptr1 + size1 > ptr2) : (ptr2 + size2 > ptr1);
}
4749
Adam Sawickiefa88c42019-11-18 16:33:56 +01004750static void TestMemoryUsage()
4751{
4752 wprintf(L"Testing memory usage:\n");
4753
Adam Sawicki69185552019-11-18 17:03:34 +01004754 static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004755 for(uint32_t usage = 0; usage <= lastUsage; ++usage)
4756 {
4757 switch(usage)
4758 {
4759 case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
4760 case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
4761 case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
4762 case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
4763 case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
4764 case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
Adam Sawicki69185552019-11-18 17:03:34 +01004765 case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
Adam Sawickiefa88c42019-11-18 16:33:56 +01004766 default: assert(0);
4767 }
4768
4769 auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
4770 {
4771 if(res == VK_SUCCESS)
4772 printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
4773 else
4774 printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
4775 };
4776
4777 // 1: Buffer for copy
4778 {
4779 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4780 bufCreateInfo.size = 65536;
4781 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
4782
4783 VkBuffer buf = VK_NULL_HANDLE;
4784 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4785 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4786
4787 VkMemoryRequirements memReq = {};
4788 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4789
4790 VmaAllocationCreateInfo allocCreateInfo = {};
4791 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4792 VmaAllocation alloc = VK_NULL_HANDLE;
4793 VmaAllocationInfo allocInfo = {};
4794 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4795 if(res == VK_SUCCESS)
4796 {
4797 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4798 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4799 TEST(res == VK_SUCCESS);
4800 }
4801 printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4802 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4803 }
4804
4805 // 2: Vertex buffer
4806 {
4807 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4808 bufCreateInfo.size = 65536;
4809 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
4810
4811 VkBuffer buf = VK_NULL_HANDLE;
4812 VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
4813 TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
4814
4815 VkMemoryRequirements memReq = {};
4816 vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
4817
4818 VmaAllocationCreateInfo allocCreateInfo = {};
4819 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4820 VmaAllocation alloc = VK_NULL_HANDLE;
4821 VmaAllocationInfo allocInfo = {};
4822 res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
4823 if(res == VK_SUCCESS)
4824 {
4825 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4826 res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
4827 TEST(res == VK_SUCCESS);
4828 }
4829 printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
4830 vmaDestroyBuffer(g_hAllocator, buf, alloc);
4831 }
4832
4833 // 3: Image for copy, OPTIMAL
4834 {
4835 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4836 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4837 imgCreateInfo.extent.width = 256;
4838 imgCreateInfo.extent.height = 256;
4839 imgCreateInfo.extent.depth = 1;
4840 imgCreateInfo.mipLevels = 1;
4841 imgCreateInfo.arrayLayers = 1;
4842 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4843 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4844 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4845 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
4846 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4847
4848 VkImage img = VK_NULL_HANDLE;
4849 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4850 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4851
4852 VkMemoryRequirements memReq = {};
4853 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4854
4855 VmaAllocationCreateInfo allocCreateInfo = {};
4856 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4857 VmaAllocation alloc = VK_NULL_HANDLE;
4858 VmaAllocationInfo allocInfo = {};
4859 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4860 if(res == VK_SUCCESS)
4861 {
4862 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4863 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4864 TEST(res == VK_SUCCESS);
4865 }
4866 printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
4867
4868 vmaDestroyImage(g_hAllocator, img, alloc);
4869 }
4870
4871 // 4: Image SAMPLED, OPTIMAL
4872 {
4873 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4874 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4875 imgCreateInfo.extent.width = 256;
4876 imgCreateInfo.extent.height = 256;
4877 imgCreateInfo.extent.depth = 1;
4878 imgCreateInfo.mipLevels = 1;
4879 imgCreateInfo.arrayLayers = 1;
4880 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4881 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4882 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4883 imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
4884 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4885
4886 VkImage img = VK_NULL_HANDLE;
4887 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4888 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4889
4890 VkMemoryRequirements memReq = {};
4891 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4892
4893 VmaAllocationCreateInfo allocCreateInfo = {};
4894 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4895 VmaAllocation alloc = VK_NULL_HANDLE;
4896 VmaAllocationInfo allocInfo = {};
4897 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4898 if(res == VK_SUCCESS)
4899 {
4900 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4901 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4902 TEST(res == VK_SUCCESS);
4903 }
4904 printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
4905 vmaDestroyImage(g_hAllocator, img, alloc);
4906 }
4907
4908 // 5: Image COLOR_ATTACHMENT, OPTIMAL
4909 {
4910 VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
4911 imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
4912 imgCreateInfo.extent.width = 256;
4913 imgCreateInfo.extent.height = 256;
4914 imgCreateInfo.extent.depth = 1;
4915 imgCreateInfo.mipLevels = 1;
4916 imgCreateInfo.arrayLayers = 1;
4917 imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
4918 imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
4919 imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
4920 imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4921 imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
4922
4923 VkImage img = VK_NULL_HANDLE;
4924 VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
4925 TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
4926
4927 VkMemoryRequirements memReq = {};
4928 vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
4929
4930 VmaAllocationCreateInfo allocCreateInfo = {};
4931 allocCreateInfo.usage = (VmaMemoryUsage)usage;
4932 VmaAllocation alloc = VK_NULL_HANDLE;
4933 VmaAllocationInfo allocInfo = {};
4934 res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
4935 if(res == VK_SUCCESS)
4936 {
4937 TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
4938 res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
4939 TEST(res == VK_SUCCESS);
4940 }
4941 printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
4942 vmaDestroyImage(g_hAllocator, img, alloc);
4943 }
4944 }
4945}
4946
Adam Sawicki50882502020-02-07 16:51:31 +01004947static uint32_t FindDeviceCoherentMemoryTypeBits()
4948{
4949 VkPhysicalDeviceMemoryProperties memProps;
4950 vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);
4951
4952 uint32_t memTypeBits = 0;
4953 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
4954 {
4955 if(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
4956 memTypeBits |= 1u << i;
4957 }
4958 return memTypeBits;
4959}
4960
4961static void TestDeviceCoherentMemory()
4962{
4963 if(!VK_AMD_device_coherent_memory_enabled)
4964 return;
4965
4966 uint32_t deviceCoherentMemoryTypeBits = FindDeviceCoherentMemoryTypeBits();
4967 // Extension is enabled, feature is enabled, and the device still doesn't support any such memory type?
4968 // OK then, so it's just fake!
4969 if(deviceCoherentMemoryTypeBits == 0)
4970 return;
4971
4972 wprintf(L"Testing device coherent memory...\n");
4973
4974 // 1. Try to allocate buffer from a memory type that is DEVICE_COHERENT.
4975
4976 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
4977 bufCreateInfo.size = 0x10000;
4978 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4979
4980 VmaAllocationCreateInfo allocCreateInfo = {};
4981 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
4982 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD;
4983
4984 AllocInfo alloc = {};
4985 VmaAllocationInfo allocInfo = {};
4986 VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
4987
4988 // Make sure it succeeded and was really created in such memory type.
4989 TEST(res == VK_SUCCESS);
4990 TEST((1u << allocInfo.memoryType) & deviceCoherentMemoryTypeBits);
4991
4992 alloc.Destroy();
4993
4994 // 2. Try to create a pool in such memory type.
4995 {
4996 VmaPoolCreateInfo poolCreateInfo = {};
4997
4998 res = vmaFindMemoryTypeIndex(g_hAllocator, UINT32_MAX, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
4999 TEST(res == VK_SUCCESS);
5000 TEST((1u << poolCreateInfo.memoryTypeIndex) & deviceCoherentMemoryTypeBits);
5001
5002 VmaPool pool = VK_NULL_HANDLE;
5003 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
5004 TEST(res == VK_SUCCESS);
5005
5006 vmaDestroyPool(g_hAllocator, pool);
5007 }
5008
5009 // 3. Try the same with a local allocator created without VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT.
5010
5011 VmaAllocatorCreateInfo allocatorCreateInfo = {};
5012 SetAllocatorCreateInfo(allocatorCreateInfo);
5013 allocatorCreateInfo.flags &= ~VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
5014
5015 VmaAllocator localAllocator = VK_NULL_HANDLE;
5016 res = vmaCreateAllocator(&allocatorCreateInfo, &localAllocator);
5017 TEST(res == VK_SUCCESS && localAllocator);
5018
5019 res = vmaCreateBuffer(localAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
5020
5021 // Make sure it failed.
5022 TEST(res != VK_SUCCESS && !alloc.m_Buffer && !alloc.m_Allocation);
5023
5024 // 4. Try to find memory type.
5025 {
5026 uint32_t memTypeIndex = UINT_MAX;
5027 res = vmaFindMemoryTypeIndex(localAllocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
5028 TEST(res != VK_SUCCESS);
5029 }
5030
5031 vmaDestroyAllocator(localAllocator);
5032}
5033
Adam Sawicki40ffe982019-10-11 15:56:02 +02005034static void TestBudget()
5035{
5036 wprintf(L"Testing budget...\n");
5037
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005038 static const VkDeviceSize BUF_SIZE = 10ull * 1024 * 1024;
Adam Sawicki353e3672019-11-02 14:12:05 +01005039 static const uint32_t BUF_COUNT = 4;
Adam Sawicki40ffe982019-10-11 15:56:02 +02005040
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005041 const VkPhysicalDeviceMemoryProperties* memProps = {};
5042 vmaGetMemoryProperties(g_hAllocator, &memProps);
5043
Adam Sawicki40ffe982019-10-11 15:56:02 +02005044 for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
5045 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005046 vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
5047
5048 VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
5049 vmaGetBudget(g_hAllocator, budgetBeg);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005050
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005051 for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005052 {
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005053 TEST(budgetBeg[i].budget > 0);
5054 TEST(budgetBeg[i].budget <= memProps->memoryHeaps[i].size);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01005055 TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
5056 }
5057
Adam Sawicki40ffe982019-10-11 15:56:02 +02005058 VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5059 bufInfo.size = BUF_SIZE;
5060 bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5061
5062 VmaAllocationCreateInfo allocCreateInfo = {};
5063 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5064 if(testIndex == 0)
5065 {
5066 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
5067 }
5068
5069 // CREATE BUFFERS
5070 uint32_t heapIndex = 0;
5071 BufferInfo bufInfos[BUF_COUNT] = {};
5072 for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
5073 {
5074 VmaAllocationInfo allocInfo;
5075 VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
5076 &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
5077 TEST(res == VK_SUCCESS);
5078 if(bufIndex == 0)
5079 {
5080 heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
5081 }
5082 else
5083 {
5084 // All buffers need to fall into the same heap.
5085 TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
5086 }
5087 }
5088
Adam Sawicki353e3672019-11-02 14:12:05 +01005089 VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
5090 vmaGetBudget(g_hAllocator, budgetWithBufs);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005091
5092 // DESTROY BUFFERS
5093 for(size_t bufIndex = BUF_COUNT; bufIndex--; )
5094 {
5095 vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
5096 }
5097
Adam Sawicki353e3672019-11-02 14:12:05 +01005098 VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
5099 vmaGetBudget(g_hAllocator, budgetEnd);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005100
5101 // CHECK
Adam Sawicki6a93b8a2020-03-09 16:58:18 +01005102 for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
Adam Sawicki40ffe982019-10-11 15:56:02 +02005103 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005104 TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005105 if(i == heapIndex)
5106 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005107 TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
5108 TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
5109 TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005110 }
5111 else
5112 {
Adam Sawicki353e3672019-11-02 14:12:05 +01005113 TEST(budgetEnd[i].allocationBytes == budgetEnd[i].allocationBytes &&
5114 budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
5115 TEST(budgetEnd[i].blockBytes == budgetEnd[i].blockBytes &&
5116 budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
Adam Sawicki40ffe982019-10-11 15:56:02 +02005117 }
5118 }
5119 }
5120}
5121
Adam Sawicki0620c8e2020-08-18 16:43:44 +02005122static void TestAliasing()
5123{
5124 wprintf(L"Testing aliasing...\n");
5125
5126 /*
5127 This is just a simple test, more like a code sample to demonstrate it's possible.
5128 */
5129
5130 // A 512x512 texture to be sampled.
5131 VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
5132 img1CreateInfo.imageType = VK_IMAGE_TYPE_2D;
5133 img1CreateInfo.extent.width = 512;
5134 img1CreateInfo.extent.height = 512;
5135 img1CreateInfo.extent.depth = 1;
5136 img1CreateInfo.mipLevels = 10;
5137 img1CreateInfo.arrayLayers = 1;
5138 img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
5139 img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
5140 img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5141 img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
5142 img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
5143
5144 // A full screen texture to be used as color attachment.
5145 VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
5146 img2CreateInfo.imageType = VK_IMAGE_TYPE_2D;
5147 img2CreateInfo.extent.width = 1920;
5148 img2CreateInfo.extent.height = 1080;
5149 img2CreateInfo.extent.depth = 1;
5150 img2CreateInfo.mipLevels = 1;
5151 img2CreateInfo.arrayLayers = 1;
5152 img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
5153 img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
5154 img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
5155 img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
5156 img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
5157
5158 VkImage img1 = VK_NULL_HANDLE;
5159 ERR_GUARD_VULKAN(vkCreateImage(g_hDevice, &img1CreateInfo, g_Allocs, &img1));
5160 VkImage img2 = VK_NULL_HANDLE;
5161 ERR_GUARD_VULKAN(vkCreateImage(g_hDevice, &img2CreateInfo, g_Allocs, &img2));
5162
5163 VkMemoryRequirements img1MemReq = {};
5164 vkGetImageMemoryRequirements(g_hDevice, img1, &img1MemReq);
5165 VkMemoryRequirements img2MemReq = {};
5166 vkGetImageMemoryRequirements(g_hDevice, img2, &img2MemReq);
5167
5168 VkMemoryRequirements finalMemReq = {};
5169 finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size);
5170 finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment);
5171 finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits;
5172 if(finalMemReq.memoryTypeBits != 0)
5173 {
5174 wprintf(L" size: max(%llu, %llu) = %llu\n",
5175 img1MemReq.size, img2MemReq.size, finalMemReq.size);
5176 wprintf(L" alignment: max(%llu, %llu) = %llu\n",
5177 img1MemReq.alignment, img2MemReq.alignment, finalMemReq.alignment);
5178 wprintf(L" memoryTypeBits: %u & %u = %u\n",
5179 img1MemReq.memoryTypeBits, img2MemReq.memoryTypeBits, finalMemReq.memoryTypeBits);
5180
5181 VmaAllocationCreateInfo allocCreateInfo = {};
5182 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
5183
5184 VmaAllocation alloc = VK_NULL_HANDLE;
5185 ERR_GUARD_VULKAN(vmaAllocateMemory(g_hAllocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr));
5186
5187 ERR_GUARD_VULKAN(vmaBindImageMemory(g_hAllocator, alloc, img1));
5188 ERR_GUARD_VULKAN(vmaBindImageMemory(g_hAllocator, alloc, img2));
5189
5190 // You can use img1, img2 here, but not at the same time!
5191
5192 vmaFreeMemory(g_hAllocator, alloc);
5193 }
5194 else
5195 {
5196 wprintf(L" Textures cannot alias!\n");
5197 }
5198
5199 vkDestroyImage(g_hDevice, img2, g_Allocs);
5200 vkDestroyImage(g_hDevice, img1, g_Allocs);
5201}
5202
// Tests vmaMapMemory()/vmaUnmapMemory() reference counting and the
// VMA_ALLOCATION_CREATE_MAPPED_BIT (persistently mapped) flag, in three setups:
// default allocations, allocations from a custom pool, and dedicated
// allocations. Correctness here depends on the exact order of map/unmap calls,
// so each step asserts the expected pMappedData state.
static void TestMapping()
{
    wprintf(L"Testing mapping...\n");

    VkResult res;
    // Memory type discovered by the first (TEST_NORMAL) iteration; the
    // TEST_POOL iteration reuses it to create its custom pool.
    uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Requires memTypeIndex filled in by the previous iteration.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufInfo.size = 0x10000;
        bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool; // nullptr except in TEST_POOL.
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        VmaAllocationInfo allocInfo;

        // Mapped manually

        // Create 2 buffers. (Slot [2] is filled later as persistently mapped.)
        BufferInfo bufferInfos[3];
        for(size_t i = 0; i < 2; ++i)
        {
            res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
                &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
            TEST(res == VK_SUCCESS);
            // Not created with MAPPED_BIT, so must not be mapped yet.
            TEST(allocInfo.pMappedData == nullptr);
            memTypeIndex = allocInfo.memoryType;
        }

        // Map buffer 0.
        char* data00 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
        TEST(res == VK_SUCCESS && data00 != nullptr);
        // Touch first and last byte to verify the whole range is accessible.
        data00[0xFFFF] = data00[0];

        // Map buffer 0 second time. Nested mapping must return the same pointer.
        char* data01 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
        TEST(res == VK_SUCCESS && data01 == data00);

        // Map buffer 1. Its mapping must not overlap buffer 0's.
        char* data1 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
        TEST(res == VK_SUCCESS && data1 != nullptr);
        TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
        data1[0xFFFF] = data1[0];

        // Unmap buffer 0 two times - must balance the two maps above.
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Unmap buffer 1.
        vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == nullptr);

        // Create 3rd buffer - persistently mapped.
        allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
        res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
            &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
        TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);

        // Map buffer 2. Manual map on a persistently mapped allocation must
        // return the same persistent pointer.
        char* data2 = nullptr;
        res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
        TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
        data2[0xFFFF] = data2[0];

        // Unmap buffer 2. It stays mapped because of the persistent mapping.
        vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
        vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
        TEST(allocInfo.pMappedData == data2);

        // Destroy all buffers.
        for(size_t i = 3; i--; )
            vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5307
Adam Sawickidaa6a552019-06-25 15:26:37 +02005308// Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
5309static void TestDeviceLocalMapped()
5310{
5311 VkResult res;
5312
5313 for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
5314 {
5315 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
5316 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
5317 bufCreateInfo.size = 4096;
5318
5319 VmaPool pool = VK_NULL_HANDLE;
5320 VmaAllocationCreateInfo allocCreateInfo = {};
5321 allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5322 allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
5323 if(testIndex == 2)
5324 {
5325 VmaPoolCreateInfo poolCreateInfo = {};
5326 res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
5327 TEST(res == VK_SUCCESS);
5328 res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
5329 TEST(res == VK_SUCCESS);
5330 allocCreateInfo.pool = pool;
5331 }
5332 else if(testIndex == 1)
5333 {
5334 allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
5335 }
5336
5337 VkBuffer buf = VK_NULL_HANDLE;
5338 VmaAllocation alloc = VK_NULL_HANDLE;
5339 VmaAllocationInfo allocInfo = {};
5340 res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
5341 TEST(res == VK_SUCCESS && alloc);
5342
5343 VkMemoryPropertyFlags memTypeFlags = 0;
5344 vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
5345 const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
5346 TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
5347
5348 vmaDestroyBuffer(g_hAllocator, buf, alloc);
5349 vmaDestroyPool(g_hAllocator, pool);
5350 }
5351}
5352
// Stress-tests vmaMapMemory()/vmaUnmapMemory() from 16 concurrent threads.
// Each thread creates 64 buffers (1024 total), randomly choosing one of five
// mapping patterns per buffer, and verifies pMappedData at each step. Runs for
// default, custom-pool, and dedicated allocations.
static void TestMappingMultithreaded()
{
    wprintf(L"Testing mapping multithreaded...\n");

    static const uint32_t threadCount = 16;
    static const uint32_t bufferCount = 1024;
    static const uint32_t threadBufferCount = bufferCount / threadCount;

    VkResult res;
    // Written by worker threads, read by the main loop to create the pool for
    // TEST_POOL. NOTE(review): volatile is not a synchronization primitive;
    // multiple threads race on this write. It is only read after join(), and
    // every writer stores a valid type index, but confirm this is intentional.
    volatile uint32_t memTypeIndex = UINT32_MAX;

    enum TEST
    {
        TEST_NORMAL,
        TEST_POOL,
        TEST_DEDICATED,
        TEST_COUNT
    };
    for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
    {
        VmaPool pool = nullptr;
        if(testIndex == TEST_POOL)
        {
            // Requires memTypeIndex published by a previous iteration's threads.
            TEST(memTypeIndex != UINT32_MAX);
            VmaPoolCreateInfo poolInfo = {};
            poolInfo.memoryTypeIndex = memTypeIndex;
            res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
            TEST(res == VK_SUCCESS);
        }

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = 0x10000;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.pool = pool;
        if(testIndex == TEST_DEDICATED)
            allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

        std::thread threads[threadCount];
        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
        {
            // Capture by copy (thread outlives this scope), except memTypeIndex
            // which is deliberately shared by reference.
            threads[threadIndex] = std::thread([=, &memTypeIndex](){
                // ======== THREAD FUNCTION ========

                // Seeded per-thread so each thread gets a distinct but
                // reproducible sequence of modes.
                RandomNumberGenerator rand{threadIndex};

                enum class MODE
                {
                    // Don't map this buffer at all.
                    DONT_MAP,
                    // Map and quickly unmap.
                    MAP_FOR_MOMENT,
                    // Map and unmap before destruction.
                    MAP_FOR_LONGER,
                    // Map two times. Quickly unmap, second unmap before destruction.
                    MAP_TWO_TIMES,
                    // Create this buffer as persistently mapped.
                    PERSISTENTLY_MAPPED,
                    COUNT
                };
                std::vector<BufferInfo> bufInfos{threadBufferCount};
                std::vector<MODE> bufModes{threadBufferCount};

                for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
                {
                    BufferInfo& bufInfo = bufInfos[bufferIndex];
                    const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
                    bufModes[bufferIndex] = mode;

                    VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
                    if(mode == MODE::PERSISTENTLY_MAPPED)
                        localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;

                    VmaAllocationInfo allocInfo;
                    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
                        &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
                    TEST(res == VK_SUCCESS);

                    // Publish the memory type for the TEST_POOL iteration.
                    if(memTypeIndex == UINT32_MAX)
                        memTypeIndex = allocInfo.memoryType;

                    char* data = nullptr;

                    if(mode == MODE::PERSISTENTLY_MAPPED)
                    {
                        data = (char*)allocInfo.pMappedData;
                        TEST(data != nullptr);
                    }
                    else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
                        mode == MODE::MAP_TWO_TIMES)
                    {
                        TEST(data == nullptr);
                        res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
                        TEST(res == VK_SUCCESS && data != nullptr);

                        if(mode == MODE::MAP_TWO_TIMES)
                        {
                            // Nested map must return the same pointer.
                            char* data2 = nullptr;
                            res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
                            TEST(res == VK_SUCCESS && data2 == data);
                        }
                    }
                    else if(mode == MODE::DONT_MAP)
                    {
                        TEST(allocInfo.pMappedData == nullptr);
                    }
                    else
                        TEST(0);

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];

                    if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);

                        // After one unmap: MAP_FOR_MOMENT is fully unmapped,
                        // MAP_TWO_TIMES still holds one map reference.
                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
                        if(mode == MODE::MAP_FOR_MOMENT)
                            TEST(allocInfo.pMappedData == nullptr);
                        else
                            TEST(allocInfo.pMappedData == data);
                    }

                    // Random sleep to shuffle thread interleaving.
                    switch(rand.Generate() % 3)
                    {
                    case 0: Sleep(0); break; // Yield.
                    case 1: Sleep(10); break; // 10 ms
                    // default: No sleep.
                    }

                    // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
                    if(data)
                        data[0xFFFF] = data[0];
                }

                // Release outstanding map references, then destroy in reverse order.
                for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
                {
                    if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
                        bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
                    {
                        vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);

                        VmaAllocationInfo allocInfo;
                        vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
                        TEST(allocInfo.pMappedData == nullptr);
                    }

                    vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
                }
            });
        }

        for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
            threads[threadIndex].join();

        vmaDestroyPool(g_hAllocator, pool);
    }
}
5515
5516static void WriteMainTestResultHeader(FILE* file)
5517{
5518 fprintf(file,
Adam Sawicki740b08f2018-08-27 13:42:07 +02005519 "Code,Time,"
5520 "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01005521 "Total Time (us),"
5522 "Allocation Time Min (us),"
5523 "Allocation Time Avg (us),"
5524 "Allocation Time Max (us),"
5525 "Deallocation Time Min (us),"
5526 "Deallocation Time Avg (us),"
5527 "Deallocation Time Max (us),"
5528 "Total Memory Allocated (B),"
5529 "Free Range Size Avg (B),"
5530 "Free Range Size Max (B)\n");
5531}
5532
5533static void WriteMainTestResult(
5534 FILE* file,
5535 const char* codeDescription,
5536 const char* testDescription,
5537 const Config& config, const Result& result)
5538{
5539 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5540 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5541 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5542 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5543 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5544 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5545 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5546
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005547 std::string currTime;
5548 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005549
5550 fprintf(file,
5551 "%s,%s,%s,"
Adam Sawickib8333fb2018-03-13 16:15:53 +01005552 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
5553 codeDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005554 currTime.c_str(),
Adam Sawicki740b08f2018-08-27 13:42:07 +02005555 testDescription,
Adam Sawickib8333fb2018-03-13 16:15:53 +01005556 totalTimeSeconds * 1e6f,
5557 allocationTimeMinSeconds * 1e6f,
5558 allocationTimeAvgSeconds * 1e6f,
5559 allocationTimeMaxSeconds * 1e6f,
5560 deallocationTimeMinSeconds * 1e6f,
5561 deallocationTimeAvgSeconds * 1e6f,
5562 deallocationTimeMaxSeconds * 1e6f,
5563 result.TotalMemoryAllocated,
5564 result.FreeRangeSizeAvg,
5565 result.FreeRangeSizeMax);
5566}
5567
// Writes the CSV column header row matching rows produced by WritePoolTestResult().
static void WritePoolTestResultHeader(FILE* file)
{
    static const char* const HEADER_ROW =
        "Code,Test,Time,"
        "Config,"
        "Total Time (us),"
        "Allocation Time Min (us),"
        "Allocation Time Avg (us),"
        "Allocation Time Max (us),"
        "Deallocation Time Min (us),"
        "Deallocation Time Avg (us),"
        "Deallocation Time Max (us),"
        "Lost Allocation Count,"
        "Lost Allocation Total Size (B),"
        "Failed Allocation Count,"
        "Failed Allocation Total Size (B)\n";
    fputs(HEADER_ROW, file);
}
5585
5586static void WritePoolTestResult(
5587 FILE* file,
5588 const char* codeDescription,
5589 const char* testDescription,
5590 const PoolTestConfig& config,
5591 const PoolTestResult& result)
5592{
5593 float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
5594 float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
5595 float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
5596 float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
5597 float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
5598 float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
5599 float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
5600
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005601 std::string currTime;
5602 CurrentTimeToStr(currTime);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005603
5604 fprintf(file,
5605 "%s,%s,%s,"
5606 "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
5607 "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
5608 // General
5609 codeDescription,
5610 testDescription,
Adam Sawicki33d2ce72018-08-27 13:59:13 +02005611 currTime.c_str(),
Adam Sawickib8333fb2018-03-13 16:15:53 +01005612 // Config
5613 config.ThreadCount,
5614 (unsigned long long)config.PoolSize,
5615 config.FrameCount,
5616 config.TotalItemCount,
5617 config.UsedItemCountMin,
5618 config.UsedItemCountMax,
5619 config.ItemsToMakeUnusedPercent,
5620 // Results
5621 totalTimeSeconds * 1e6f,
5622 allocationTimeMinSeconds * 1e6f,
5623 allocationTimeAvgSeconds * 1e6f,
5624 allocationTimeMaxSeconds * 1e6f,
5625 deallocationTimeMinSeconds * 1e6f,
5626 deallocationTimeAvgSeconds * 1e6f,
5627 deallocationTimeMaxSeconds * 1e6f,
5628 result.LostAllocationCount,
5629 result.LostAllocationTotalSize,
5630 result.FailedAllocationCount,
5631 result.FailedAllocationTotalSize);
5632}
5633
5634static void PerformCustomMainTest(FILE* file)
5635{
5636 Config config{};
5637 config.RandSeed = 65735476;
5638 //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
5639 config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
5640 config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
5641 config.FreeOrder = FREE_ORDER::FORWARD;
5642 config.ThreadCount = 16;
5643 config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
Adam Sawicki0667e332018-08-24 17:26:44 +02005644 config.AllocationStrategy = 0;
Adam Sawickib8333fb2018-03-13 16:15:53 +01005645
5646 // Buffers
5647 //config.AllocationSizes.push_back({4, 16, 1024});
5648 config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
5649
5650 // Images
5651 //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
5652 //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
5653
5654 config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
5655 config.AdditionalOperationCount = 1024;
5656
5657 Result result{};
5658 VkResult res = MainTest(result, config);
Adam Sawickib8d34d52018-10-03 17:41:20 +02005659 TEST(res == VK_SUCCESS);
Adam Sawickib8333fb2018-03-13 16:15:53 +01005660 WriteMainTestResult(file, "Foo", "CustomTest", config, result);
5661}
5662
// Runs a single, hand-tuned configuration of the pool benchmark and appends
// its result to `file`. Intended for ad-hoc experiments.
static void PerformCustomPoolTest(FILE* file)
{
    PoolTestConfig config;
    // NOTE(review): this PoolSize is overwritten below based on
    // CalcAvgResourceSize(); looks like a dead store unless
    // CalcAvgResourceSize() reads it - confirm before removing.
    config.PoolSize = 100 * 1024 * 1024;
    config.RandSeed = 2345764;
    config.ThreadCount = 1;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    // First size class: buffers 1 KB ... 1 MB.
    AllocationSize allocSize = {};
    allocSize.BufferSizeMin = 1024;
    allocSize.BufferSizeMax = 1024 * 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    // Second size class: images with ImageSize 128 ... 1024, equal probability.
    allocSize.BufferSizeMin = 0;
    allocSize.BufferSizeMax = 0;
    allocSize.ImageSizeMin = 128;
    allocSize.ImageSizeMax = 1024;
    allocSize.Probability = 1;
    config.AllocationSizes.push_back(allocSize);

    // Size the pool for ~200 average resources; keep 128...160 of 1600 items in use.
    config.PoolSize = config.CalcAvgResourceSize() * 200;
    config.UsedItemCountMax = 160;
    config.TotalItemCount = config.UsedItemCountMax * 10;
    config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

    PoolTestResult result = {};
    TestPool_Benchmark(result, config);

    WritePoolTestResult(file, "Code desc", "Test desc", config, result);
}
5695
// Runs the main benchmark over a cartesian product of configurations:
// thread count x resource kind (buffers/images) x size class (small/large) x
// varying/constant sizes x initial allocation percentage x allocation
// strategy, each repeated `repeatCount` times. The number of variants on
// most axes grows with the global ConfigType. Each run appends a CSV row to
// `file` (if not null) matching WriteMainTestResultHeader(); the descN
// strings accumulate the comma-separated test description column by column.
static void PerformMainTests(FILE* file)
{
    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    // Parameters common to all runs; per-axis fields are filled in the loops below.
    Config config{};
    config.RandSeed = 65735476;
    config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
    config.FreeOrder = FREE_ORDER::FORWARD;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
    default: assert(0);
    }

    const size_t strategyCount = GetAllocationStrategyCount();

    // Axis 1: thread count and probability of threads sharing allocations.
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 1:
            desc1 += "16_threads+0%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 2:
            desc1 += "16_threads+50%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 3:
            desc1 += "16_threads+100%_common";
            config.ThreadCount = 16;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        case 4:
            desc1 += "2_threads+0%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
            break;
        case 5:
            desc1 += "2_threads+50%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
            break;
        case 6:
            desc1 += "2_threads+100%_common";
            config.ThreadCount = 2;
            config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
            break;
        default:
            assert(0);
        }

        // Axis 2: 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += ",Buffers"; break;
            case 1: desc2 += ",Images"; break;
            case 2: desc2 += ",Buffers+Images"; break;
            default: assert(0);
            }

            // Axis 3: 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += ",Small"; break;
                case 1: desc3 += ",Large"; break;
                case 2: desc3 += ",Small+Large"; break;
                default: assert(0);
                }

                // Larger budget whenever large resources are involved.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
                else
                    config.MaxBytesToAllocate = 4ull * 1024 * 1024;

                // Axis 4: 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the size-class list for this combination of axes 2-4.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Axis 5: 0 = 100%, additional_operations = 0, 1 = 50%, 2 = 5%, 3 = 95% additional_operations = a lot
                    size_t beginBytesToAllocateCount = 1;
                    if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
                    if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
                    for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
                    {
                        std::string desc5 = desc4;

                        switch(beginBytesToAllocateIndex)
                        {
                        case 0:
                            desc5 += ",Allocate_100%";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate;
                            config.AdditionalOperationCount = 0;
                            break;
                        case 1:
                            desc5 += ",Allocate_50%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 2:
                            desc5 += ",Allocate_5%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        case 3:
                            desc5 += ",Allocate_95%+Operations";
                            config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
                            config.AdditionalOperationCount = 1024;
                            break;
                        default:
                            assert(0);
                        }

                        // Axis 6: allocation strategy (count queried from the library above).
                        for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
                        {
                            std::string desc6 = desc5;
                            switch(strategyIndex)
                            {
                            case 0:
                                desc6 += ",BestFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
                                break;
                            case 1:
                                desc6 += ",WorstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
                                break;
                            case 2:
                                desc6 += ",FirstFit";
                                config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
                                break;
                            default:
                                assert(0);
                            }

                            desc6 += ',';
                            desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];

                            const char* testDescription = desc6.c_str();

                            // Execute this configuration, possibly multiple times.
                            for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                            {
                                printf("%s #%u\n", testDescription, (uint32_t)repeat);

                                Result result{};
                                VkResult res = MainTest(result, config);
                                TEST(res == VK_SUCCESS);
                                if(file)
                                {
                                    WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
5960
// Runs the pool benchmark over a cartesian product of configurations:
// thread count x resource kind (buffers/images) x size class (small/large) x
// varying/constant sizes x pool subscription level, each repeated
// `repeatCount` times. The pool size is derived from the average resource
// size so the pool holds ~AVG_RESOURCES_PER_POOL resources. Each run appends
// a CSV row to `file` matching WritePoolTestResultHeader().
static void PerformPoolTests(FILE* file)
{
    const size_t AVG_RESOURCES_PER_POOL = 300;

    uint32_t repeatCount = 1;
    if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;

    // Parameters common to all runs; per-axis fields are filled in the loops below.
    PoolTestConfig config{};
    config.RandSeed = 2346343;
    config.FrameCount = 200;
    config.ItemsToMakeUnusedPercent = 2;

    size_t threadCountCount = 1;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
    case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
    case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
    case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
    case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
    default: assert(0);
    }
    // Axis 1: thread count.
    for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
    {
        std::string desc1;

        switch(threadCountIndex)
        {
        case 0:
            desc1 += "1_thread";
            config.ThreadCount = 1;
            break;
        case 1:
            desc1 += "16_threads";
            config.ThreadCount = 16;
            break;
        case 2:
            desc1 += "2_threads";
            config.ThreadCount = 2;
            break;
        default:
            assert(0);
        }

        // Axis 2: 0 = buffers, 1 = images, 2 = buffers and images
        size_t buffersVsImagesCount = 2;
        if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
        for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
        {
            std::string desc2 = desc1;
            switch(buffersVsImagesIndex)
            {
            case 0: desc2 += " Buffers"; break;
            case 1: desc2 += " Images"; break;
            case 2: desc2 += " Buffers+Images"; break;
            default: assert(0);
            }

            // Axis 3: 0 = small, 1 = large, 2 = small and large
            size_t smallVsLargeCount = 2;
            if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
            for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
            {
                std::string desc3 = desc2;
                switch(smallVsLargeIndex)
                {
                case 0: desc3 += " Small"; break;
                case 1: desc3 += " Large"; break;
                case 2: desc3 += " Small+Large"; break;
                default: assert(0);
                }

                // NOTE(review): this PoolSize is overwritten below from
                // CalcAvgResourceSize(); appears dead unless
                // CalcAvgResourceSize() reads it - confirm.
                if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                    config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
                else
                    config.PoolSize = 4ull * 1024 * 1024;

                // Axis 4: 0 = varying sizes min...max, 1 = set of constant sizes
                size_t constantSizesCount = 1;
                if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
                for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
                {
                    std::string desc4 = desc3;
                    switch(constantSizesIndex)
                    {
                    case 0: desc4 += " Varying_sizes"; break;
                    case 1: desc4 += " Constant_sizes"; break;
                    default: assert(0);
                    }

                    // Rebuild the size-class list for this combination of axes 2-4.
                    config.AllocationSizes.clear();
                    // Buffers present
                    if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 16, 1024});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 16, 16});
                                config.AllocationSizes.push_back({1, 64, 64});
                                config.AllocationSizes.push_back({1, 256, 256});
                                config.AllocationSizes.push_back({1, 1024, 1024});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0x10000, 0x10000});
                                config.AllocationSizes.push_back({1, 0x80000, 0x80000});
                                config.AllocationSizes.push_back({1, 0x200000, 0x200000});
                                config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
                            }
                        }
                    }
                    // Images present
                    if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
                    {
                        // Small
                        if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 4, 32});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 4, 4});
                                config.AllocationSizes.push_back({1, 0, 0, 8, 8});
                                config.AllocationSizes.push_back({1, 0, 0, 16, 16});
                                config.AllocationSizes.push_back({1, 0, 0, 32, 32});
                            }
                        }
                        // Large
                        if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
                        {
                            // Varying size
                            if(constantSizesIndex == 0)
                                config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
                            // Constant sizes
                            else
                            {
                                config.AllocationSizes.push_back({1, 0, 0, 256, 256});
                                config.AllocationSizes.push_back({1, 0, 0, 512, 512});
                                config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
                                config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
                            }
                        }
                    }

                    // Size the pool for ~AVG_RESOURCES_PER_POOL average-sized resources.
                    const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
                    config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;

                    // Axis 5: pool subscription: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166%
                    size_t subscriptionModeCount;
                    switch(ConfigType)
                    {
                    case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
                    case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
                    case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
                    case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
                    default: assert(0);
                    }
                    for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
                    {
                        std::string desc5 = desc4;

                        switch(subscriptionModeIndex)
                        {
                        case 0:
                            desc5 += " Subscription_66%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
                            break;
                        case 1:
                            desc5 += " Subscription_133%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
                            break;
                        case 2:
                            desc5 += " Subscription_100%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
                            break;
                        case 3:
                            desc5 += " Subscription_33%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
                            break;
                        case 4:
                            desc5 += " Subscription_166%";
                            config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
                            break;
                        default:
                            assert(0);
                        }

                        config.TotalItemCount = config.UsedItemCountMax * 5;
                        config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;

                        const char* testDescription = desc5.c_str();

                        // Execute this configuration, possibly multiple times.
                        for(size_t repeat = 0; repeat < repeatCount; ++repeat)
                        {
                            printf("%s #%u\n", testDescription, (uint32_t)repeat);

                            PoolTestResult result{};
                            TestPool_Benchmark(result, config);
                            WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
                        }
                    }
                }
            }
        }
    }
}
6184
// Smoke test of the buddy allocation algorithm in a custom pool:
// creates a pool with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT, makes buffer
// allocations of assorted sizes (including size 1 and an explicitly aligned
// raw allocation), dumps stats to a JSON file, then frees everything in
// random order and destroys the pool.
static void BasicTestBuddyAllocator()
{
    wprintf(L"Basic test buddy allocator\n");

    RandomNumberGenerator rand{76543};

    // Sample buffer only used to find a suitable memory type for the pool.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // Deliberately adding 1023 to test usable size smaller than memory block size.
    poolCreateInfo.blockSize = 1024 * 1024 + 1023;
    poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
    //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;

    // All subsequent allocations are forced into the buddy pool.
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;

    std::vector<BufferInfo> bufInfo;
    BufferInfo newBufInfo;
    VmaAllocationInfo allocInfo;

    // Three buffers of 256, 512 and 128 KB.
    bufCreateInfo.size = 1024 * 256;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 512;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    bufCreateInfo.size = 1024 * 128;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test very small allocation, smaller than minimum node size.
    bufCreateInfo.size = 1;
    res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
        &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
    TEST(res == VK_SUCCESS);
    bufInfo.push_back(newBufInfo);

    // Test some small allocation with alignment requirement.
    {
        VkMemoryRequirements memReq;
        memReq.alignment = 256;
        memReq.memoryTypeBits = UINT32_MAX;
        memReq.size = 32;

        // Raw allocation without a buffer - Buffer stays null on purpose.
        newBufInfo.Buffer = VK_NULL_HANDLE;
        res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
            &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        TEST(allocInfo.offset % memReq.alignment == 0);
        bufInfo.push_back(newBufInfo);
    }

    //SaveAllocatorStatsToFile(L"TEST.json");

    VmaPoolStats stats = {};
    vmaGetPoolStats(g_hAllocator, pool, &stats);
    int DBG = 0; // Set breakpoint here to inspect `stats`.

    // Allocate enough new buffers to surely fall into second block.
    for(uint32_t i = 0; i < 32; ++i)
    {
        // Random size: 1 KB ... 32 KB.
        bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
        res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
            &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
        TEST(res == VK_SUCCESS);
        bufInfo.push_back(newBufInfo);
    }

    SaveAllocatorStatsToFile(L"BuddyTest01.json");

    // Destroy the buffers in random order.
    while(!bufInfo.empty())
    {
        const size_t indexToDestroy = rand.Generate() % bufInfo.size();
        const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
        vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
        bufInfo.erase(bufInfo.begin() + indexToDestroy);
    }

    vmaDestroyPool(g_hAllocator, pool);
}
6289
// Tests vmaAllocateMemoryPages()/vmaFreeMemoryPages(): bulk allocation that
// succeeds or fails as a whole - first in a fixed-size custom pool (success,
// then two expected failures), then as dedicated allocations.
static void BasicTestAllocatePages()
{
    wprintf(L"Basic test allocate pages\n");

    RandomNumberGenerator rand{765461};

    // Sample buffer only used to find a suitable memory type for the pool.
    VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    sampleBufCreateInfo.size = 1024; // Whatever.
    sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo sampleAllocCreateInfo = {};
    sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VmaPoolCreateInfo poolCreateInfo = {};
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
    TEST(res == VK_SUCCESS);

    // 1 block of 1 MB.
    poolCreateInfo.blockSize = 1024 * 1024;
    poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;

    // Create pool.
    VmaPool pool = nullptr;
    res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
    TEST(res == VK_SUCCESS);

    // Make 100 allocations of 4 KB - they should fit into the pool.
    VkMemoryRequirements memReq;
    memReq.memoryTypeBits = UINT32_MAX;
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
    allocCreateInfo.pool = pool;

    constexpr uint32_t allocCount = 100;

    // Note: brace-init with a uint32_t selects the size constructor here
    // (an integer is not convertible to a handle), so these vectors get
    // allocCount null/empty elements.
    std::vector<VmaAllocation> alloc{allocCount};
    std::vector<VmaAllocationInfo> allocInfo{allocCount};
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    // All pages must come from the single pool block: same memory, same type.
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
            allocInfo[i].memoryType == allocInfo[0].memoryType);
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
    // Also test optional allocationInfo = null.
    memReq.size = 100 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
    TEST(res != VK_SUCCESS);
    // On failure no allocation may be left behind - all handles stay null.
    TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());

    // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
    memReq.size = 4 * 1024;
    memReq.alignment = 128 * 1024;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res != VK_SUCCESS);

    // Make 100 dedicated allocations of 4 KB.
    memReq.alignment = 4 * 1024;
    memReq.size = 4 * 1024;

    VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
    dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
    TEST(res == VK_SUCCESS);
    // Dedicated: each allocation gets its own VkDeviceMemory at offset 0.
    for(uint32_t i = 0; i < allocCount; ++i)
    {
        TEST(alloc[i] != VK_NULL_HANDLE &&
            allocInfo[i].pMappedData != nullptr &&
            allocInfo[i].memoryType == allocInfo[0].memoryType &&
            allocInfo[i].offset == 0);
        if(i > 0)
        {
            TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
        }
    }

    // Free the allocations.
    vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
    std::fill(alloc.begin(), alloc.end(), nullptr);
    std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});

    vmaDestroyPool(g_hAllocator, pool);
}
6386
Adam Sawickif2975342018-10-16 13:49:02 +02006387// Test the testing environment.
6388static void TestGpuData()
6389{
6390 RandomNumberGenerator rand = { 53434 };
6391
6392 std::vector<AllocInfo> allocInfo;
6393
6394 for(size_t i = 0; i < 100; ++i)
6395 {
6396 AllocInfo info = {};
6397
6398 info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
6399 info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
6400 VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
6401 VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
6402 info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
6403
6404 VmaAllocationCreateInfo allocCreateInfo = {};
6405 allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
6406
6407 VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
6408 TEST(res == VK_SUCCESS);
6409
6410 info.m_StartValue = rand.Generate();
6411
6412 allocInfo.push_back(std::move(info));
6413 }
6414
6415 UploadGpuData(allocInfo.data(), allocInfo.size());
6416
6417 ValidateGpuData(allocInfo.data(), allocInfo.size());
6418
6419 DestroyAllAllocations(allocInfo);
6420}
6421
Adam Sawickib8333fb2018-03-13 16:15:53 +01006422void Test()
6423{
6424 wprintf(L"TESTING:\n");
6425
Adam Sawicki48b8a332019-11-02 15:24:33 +01006426 if(false)
Adam Sawicki70a683e2018-08-24 15:36:32 +02006427 {
Adam Sawicki1a8424f2018-12-13 11:01:16 +01006428 ////////////////////////////////////////////////////////////////////////////////
6429 // Temporarily insert custom tests here:
Adam Sawicki70a683e2018-08-24 15:36:32 +02006430 return;
6431 }
6432
Adam Sawickib8333fb2018-03-13 16:15:53 +01006433 // # Simple tests
6434
6435 TestBasics();
Adam Sawickiaaa1a562020-06-24 17:41:09 +02006436 TestAllocationVersusResourceSize();
Adam Sawickif2975342018-10-16 13:49:02 +02006437 //TestGpuData(); // Not calling this because it's just testing the testing environment.
Adam Sawicki212a4a62018-06-14 15:44:45 +02006438#if VMA_DEBUG_MARGIN
6439 TestDebugMargin();
6440#else
6441 TestPool_SameSize();
Adam Sawickiddcbf8c2019-11-22 15:22:42 +01006442 TestPool_MinBlockCount();
Adam Sawicki212a4a62018-06-14 15:44:45 +02006443 TestHeapSizeLimit();
6444#endif
Adam Sawickie44c6262018-06-15 14:30:39 +02006445#if VMA_DEBUG_INITIALIZE_ALLOCATIONS
6446 TestAllocationsInitialization();
6447#endif
Adam Sawickiefa88c42019-11-18 16:33:56 +01006448 TestMemoryUsage();
Adam Sawicki50882502020-02-07 16:51:31 +01006449 TestDeviceCoherentMemory();
Adam Sawicki40ffe982019-10-11 15:56:02 +02006450 TestBudget();
Adam Sawicki0620c8e2020-08-18 16:43:44 +02006451 TestAliasing();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006452 TestMapping();
Adam Sawickidaa6a552019-06-25 15:26:37 +02006453 TestDeviceLocalMapped();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006454 TestMappingMultithreaded();
Adam Sawicki0876c0d2018-06-20 15:18:11 +02006455 TestLinearAllocator();
Adam Sawicki8cfe05f2018-08-22 16:48:17 +02006456 ManuallyTestLinearAllocator();
Adam Sawicki70a683e2018-08-24 15:36:32 +02006457 TestLinearAllocatorMultiBlock();
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006458
Adam Sawicki4338f662018-09-07 14:12:37 +02006459 BasicTestBuddyAllocator();
Adam Sawicki2e4d3ef2018-10-03 15:48:17 +02006460 BasicTestAllocatePages();
Adam Sawicki4338f662018-09-07 14:12:37 +02006461
Adam Sawickie73e9882020-03-20 18:05:42 +01006462 if(g_BufferDeviceAddressEnabled)
6463 TestBufferDeviceAddress();
Adam Sawickif2012052021-01-11 18:04:42 +01006464 if(VK_EXT_memory_priority_enabled)
6465 TestMemoryPriority();
Adam Sawickie73e9882020-03-20 18:05:42 +01006466
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006467 {
6468 FILE* file;
Adam Sawickic6432d12018-09-21 16:44:16 +02006469 fopen_s(&file, "Algorithms.csv", "w");
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006470 assert(file != NULL);
Adam Sawicki80927152018-09-07 17:27:23 +02006471 BenchmarkAlgorithms(file);
Adam Sawicki33d2ce72018-08-27 13:59:13 +02006472 fclose(file);
6473 }
6474
Adam Sawickib8333fb2018-03-13 16:15:53 +01006475 TestDefragmentationSimple();
6476 TestDefragmentationFull();
Adam Sawicki52076eb2018-11-22 16:14:50 +01006477 TestDefragmentationWholePool();
Adam Sawicki9a4f5082018-11-23 17:26:05 +01006478 TestDefragmentationGpu();
Adam Sawickia52012d2019-12-23 15:28:51 +01006479 TestDefragmentationIncrementalBasic();
6480 TestDefragmentationIncrementalComplex();
Adam Sawickib8333fb2018-03-13 16:15:53 +01006481
6482 // # Detailed tests
6483 FILE* file;
6484 fopen_s(&file, "Results.csv", "w");
6485 assert(file != NULL);
6486
6487 WriteMainTestResultHeader(file);
6488 PerformMainTests(file);
6489 //PerformCustomMainTest(file);
6490
6491 WritePoolTestResultHeader(file);
6492 PerformPoolTests(file);
6493 //PerformCustomPoolTest(file);
6494
6495 fclose(file);
Adam Sawicki4ac8ff82019-11-18 14:47:33 +01006496
Adam Sawickib8333fb2018-03-13 16:15:53 +01006497 wprintf(L"Done.\n");
6498}
6499
Adam Sawickif1a793c2018-03-13 15:42:22 +01006500#endif // #ifdef _WIN32