//
// Copyright (c) 2017-2021 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#include "Tests.h"
#include "VmaUsage.h"
#include "Common.h"
#include <atomic>
#include <thread>
#include <mutex>
#include <functional>

#ifdef _WIN32

static const char* CODE_DESCRIPTION = "Foo";

extern VkCommandBuffer g_hTemporaryCommandBuffer;
extern const VkAllocationCallbacks* g_Allocs;
extern bool g_BufferDeviceAddressEnabled;
extern bool VK_EXT_memory_priority_enabled;
extern PFN_vkGetBufferDeviceAddressEXT g_vkGetBufferDeviceAddressEXT;
void BeginSingleTimeCommands();
void EndSingleTimeCommands();

#ifndef VMA_DEBUG_MARGIN
    #define VMA_DEBUG_MARGIN 0
#endif

enum CONFIG_TYPE {
    CONFIG_TYPE_MINIMUM,
    CONFIG_TYPE_SMALL,
    CONFIG_TYPE_AVERAGE,
    CONFIG_TYPE_LARGE,
    CONFIG_TYPE_MAXIMUM,
    CONFIG_TYPE_COUNT
};

static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_SMALL;
//static constexpr CONFIG_TYPE ConfigType = CONFIG_TYPE_LARGE;

enum class FREE_ORDER { FORWARD, BACKWARD, RANDOM, COUNT };

static const char* FREE_ORDER_NAMES[] = {
    "FORWARD",
    "BACKWARD",
    "RANDOM",
};

// Copy of internal VmaAlgorithmToStr.
static const char* AlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        assert(0);
        return "";
    }
}

struct AllocationSize
{
    uint32_t Probability;
    VkDeviceSize BufferSizeMin, BufferSizeMax;
    uint32_t ImageSizeMin, ImageSizeMax;
};

struct Config
{
    uint32_t RandSeed;
    VkDeviceSize BeginBytesToAllocate;
    uint32_t AdditionalOperationCount;
    VkDeviceSize MaxBytesToAllocate;
    uint32_t MemUsageProbability[4]; // For VMA_MEMORY_USAGE_*
    std::vector<AllocationSize> AllocationSizes;
    uint32_t ThreadCount;
    uint32_t ThreadsUsingCommonAllocationsProbabilityPercent;
    FREE_ORDER FreeOrder;
    VmaAllocationCreateFlags AllocationStrategy; // For VMA_ALLOCATION_CREATE_STRATEGY_*
};
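
// Illustrative sketch (not part of the original test suite): one way a Config for
// MainTest() could be filled. The specific values below are arbitrary assumptions,
// chosen only to show what each field means; MemUsageProbability indices map to
// VMA_MEMORY_USAGE_GPU_ONLY, CPU_ONLY, CPU_TO_GPU, GPU_TO_CPU in that order.
static Config MakeExampleConfig()
{
    Config config = {};
    config.RandSeed = 12345;
    config.BeginBytesToAllocate = 16ull * 1024 * 1024;     // Allocate roughly 16 MB up front.
    config.MaxBytesToAllocate = 64ull * 1024 * 1024;       // Never exceed roughly 64 MB per run.
    config.AdditionalOperationCount = 1024;                // Random alloc/free operations afterwards.
    config.MemUsageProbability[0] = 1;                     // VMA_MEMORY_USAGE_GPU_ONLY
    config.MemUsageProbability[1] = 1;                     // VMA_MEMORY_USAGE_CPU_ONLY
    config.MemUsageProbability[2] = 1;                     // VMA_MEMORY_USAGE_CPU_TO_GPU
    config.MemUsageProbability[3] = 1;                     // VMA_MEMORY_USAGE_GPU_TO_CPU
    config.AllocationSizes.push_back({4, 16, 1024, 0, 0}); // Small buffers, weight 4.
    config.AllocationSizes.push_back({1, 0, 0, 4, 32});    // Small images, weight 1.
    config.ThreadCount = 2;
    config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
    config.FreeOrder = FREE_ORDER::RANDOM;
    config.AllocationStrategy = 0; // Default strategy.
    return config;
}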

struct Result
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    VkDeviceSize TotalMemoryAllocated;
    VkDeviceSize FreeRangeSizeAvg, FreeRangeSizeMax;
};

void TestDefragmentationSimple();
void TestDefragmentationFull();

struct PoolTestConfig
{
    uint32_t RandSeed;
    uint32_t ThreadCount;
    VkDeviceSize PoolSize;
    uint32_t FrameCount;
    uint32_t TotalItemCount;
    // Range for number of items used in each frame.
    uint32_t UsedItemCountMin, UsedItemCountMax;
    // Percent of items to make unused, and possibly make some others used in each frame.
    uint32_t ItemsToMakeUnusedPercent;
    std::vector<AllocationSize> AllocationSizes;

    VkDeviceSize CalcAvgResourceSize() const
    {
        uint32_t probabilitySum = 0;
        VkDeviceSize sizeSum = 0;
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
        {
            const AllocationSize& allocSize = AllocationSizes[i];
            if(allocSize.BufferSizeMax > 0)
                sizeSum += (allocSize.BufferSizeMin + allocSize.BufferSizeMax) / 2 * allocSize.Probability;
            else
            {
                const VkDeviceSize avgDimension = (allocSize.ImageSizeMin + allocSize.ImageSizeMax) / 2;
                sizeSum += avgDimension * avgDimension * 4 * allocSize.Probability;
            }
            probabilitySum += allocSize.Probability;
        }
        return sizeSum / probabilitySum;
    }

    bool UsesBuffers() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].BufferSizeMax > 0)
                return true;
        return false;
    }

    bool UsesImages() const
    {
        for(size_t i = 0; i < AllocationSizes.size(); ++i)
            if(AllocationSizes[i].ImageSizeMax > 0)
                return true;
        return false;
    }
};
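
// Illustrative sketch (assumption, not original code): CalcAvgResourceSize() weights
// each entry by Probability and treats an image entry as width * height * 4 bytes
// (RGBA8). With the two entries below the estimate is
// (((64 + 192) / 2) * 3 + (16 * 16 * 4) * 1) / (3 + 1) = 352 bytes.
static VkDeviceSize ExamplePoolTestAvgSize()
{
    PoolTestConfig cfg = {};
    cfg.AllocationSizes.push_back({3, 64, 192, 0, 0}); // Buffers 64..192 B, weight 3.
    cfg.AllocationSizes.push_back({1, 0, 0, 16, 16});  // 16x16 RGBA8 images, weight 1.
    return cfg.CalcAvgResourceSize();                  // Returns 352 for this mix.
}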

struct PoolTestResult
{
    duration TotalTime;
    duration AllocationTimeMin, AllocationTimeAvg, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeAvg, DeallocationTimeMax;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

static const uint32_t IMAGE_BYTES_PER_PIXEL = 1;

uint32_t g_FrameIndex = 0;

struct BufferInfo
{
    VkBuffer Buffer = VK_NULL_HANDLE;
    VmaAllocation Allocation = VK_NULL_HANDLE;
};

static uint32_t MemoryTypeToHeap(uint32_t memoryTypeIndex)
{
    const VkPhysicalDeviceMemoryProperties* props;
    vmaGetMemoryProperties(g_hAllocator, &props);
    return props->memoryTypes[memoryTypeIndex].heapIndex;
}

static uint32_t GetAllocationStrategyCount()
{
    uint32_t strategyCount = 0;
    switch(ConfigType)
    {
    case CONFIG_TYPE_MINIMUM: strategyCount = 1; break;
    case CONFIG_TYPE_SMALL:   strategyCount = 1; break;
    case CONFIG_TYPE_AVERAGE: strategyCount = 2; break;
    case CONFIG_TYPE_LARGE:   strategyCount = 2; break;
    case CONFIG_TYPE_MAXIMUM: strategyCount = 3; break;
    default: assert(0);
    }
    return strategyCount;
}

static const char* GetAllocationStrategyName(VmaAllocationCreateFlags allocStrategy)
{
    switch(allocStrategy)
    {
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT: return "BEST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT: return "WORST_FIT"; break;
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT: return "FIRST_FIT"; break;
    case 0: return "Default"; break;
    default: assert(0); return "";
    }
}

static void InitResult(Result& outResult)
{
    outResult.TotalTime = duration::zero();
    outResult.AllocationTimeMin = duration::max();
    outResult.AllocationTimeAvg = duration::zero();
    outResult.AllocationTimeMax = duration::min();
    outResult.DeallocationTimeMin = duration::max();
    outResult.DeallocationTimeAvg = duration::zero();
    outResult.DeallocationTimeMax = duration::min();
    outResult.TotalMemoryAllocated = 0;
    outResult.FreeRangeSizeAvg = 0;
    outResult.FreeRangeSizeMax = 0;
}

class TimeRegisterObj
{
public:
    TimeRegisterObj(duration& min, duration& sum, duration& max) :
        m_Min(min),
        m_Sum(sum),
        m_Max(max),
        m_TimeBeg(std::chrono::high_resolution_clock::now())
    {
    }

    ~TimeRegisterObj()
    {
        duration d = std::chrono::high_resolution_clock::now() - m_TimeBeg;
        m_Sum += d;
        if(d < m_Min) m_Min = d;
        if(d > m_Max) m_Max = d;
    }

private:
    duration& m_Min;
    duration& m_Sum;
    duration& m_Max;
    time_point m_TimeBeg;
};
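
// Illustrative sketch (assumption, not original code): TimeRegisterObj is an RAII
// helper - constructing it stamps the start time, and its destructor folds the
// elapsed time into the given min/sum/max accumulators. A scoped measurement of an
// arbitrary function (DoWork here is a hypothetical placeholder) looks like:
//
//     duration workMin = duration::max();
//     duration workSum = duration::zero();
//     duration workMax = duration::min();
//     {
//         TimeRegisterObj timeRegisterObj{workMin, workSum, workMax};
//         DoWork();
//     } // Destructor runs here and updates workMin/workSum/workMax.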

struct PoolTestThreadResult
{
    duration AllocationTimeMin, AllocationTimeSum, AllocationTimeMax;
    duration DeallocationTimeMin, DeallocationTimeSum, DeallocationTimeMax;
    size_t AllocationCount, DeallocationCount;
    size_t LostAllocationCount, LostAllocationTotalSize;
    size_t FailedAllocationCount, FailedAllocationTotalSize;
};

class AllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    AllocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeAvg, result.AllocationTimeMax)
    {
    }
};

class DeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    DeallocationTimeRegisterObj(Result& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeAvg, result.DeallocationTimeMax)
    {
    }
};

class PoolAllocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolAllocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.AllocationTimeMin, result.AllocationTimeSum, result.AllocationTimeMax)
    {
    }
};

class PoolDeallocationTimeRegisterObj : public TimeRegisterObj
{
public:
    PoolDeallocationTimeRegisterObj(PoolTestThreadResult& result) :
        TimeRegisterObj(result.DeallocationTimeMin, result.DeallocationTimeSum, result.DeallocationTimeMax)
    {
    }
};

static void CurrentTimeToStr(std::string& out)
{
    time_t rawTime; time(&rawTime);
    struct tm timeInfo; localtime_s(&timeInfo, &rawTime);
    char timeStr[128];
    strftime(timeStr, _countof(timeStr), "%c", &timeInfo);
    out = timeStr;
}

VkResult MainTest(Result& outResult, const Config& config)
{
    assert(config.ThreadCount > 0);

    InitResult(outResult);

    RandomNumberGenerator mainRand{config.RandSeed};

    time_point timeBeg = std::chrono::high_resolution_clock::now();

    std::atomic<size_t> allocationCount = 0;
    VkResult res = VK_SUCCESS;

    uint32_t memUsageProbabilitySum =
        config.MemUsageProbability[0] + config.MemUsageProbability[1] +
        config.MemUsageProbability[2] + config.MemUsageProbability[3];
    assert(memUsageProbabilitySum > 0);

    uint32_t allocationSizeProbabilitySum = std::accumulate(
        config.AllocationSizes.begin(),
        config.AllocationSizes.end(),
        0u,
        [](uint32_t sum, const AllocationSize& allocSize) {
            return sum + allocSize.Probability;
        });

    struct Allocation
    {
        VkBuffer Buffer;
        VkImage Image;
        VmaAllocation Alloc;
    };

    std::vector<Allocation> commonAllocations;
    std::mutex commonAllocationsMutex;

    auto Allocate = [&](
        VkDeviceSize bufferSize,
        const VkExtent2D imageExtent,
        RandomNumberGenerator& localRand,
        VkDeviceSize& totalAllocatedBytes,
        std::vector<Allocation>& allocations) -> VkResult
    {
        assert((bufferSize == 0) != (imageExtent.width == 0 && imageExtent.height == 0));

        uint32_t memUsageIndex = 0;
        uint32_t memUsageRand = localRand.Generate() % memUsageProbabilitySum;
        while(memUsageRand >= config.MemUsageProbability[memUsageIndex])
            memUsageRand -= config.MemUsageProbability[memUsageIndex++];

        VmaAllocationCreateInfo memReq = {};
        memReq.usage = (VmaMemoryUsage)(VMA_MEMORY_USAGE_GPU_ONLY + memUsageIndex);
        memReq.flags |= config.AllocationStrategy;

        Allocation allocation = {};
        VmaAllocationInfo allocationInfo;

        // Buffer
        if(bufferSize > 0)
        {
            assert(imageExtent.width == 0);
            VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
            bufferInfo.size = bufferSize;
            bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &memReq, &allocation.Buffer, &allocation.Alloc, &allocationInfo);
            }
        }
        // Image
        else
        {
            VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
            imageInfo.imageType = VK_IMAGE_TYPE_2D;
            imageInfo.extent.width = imageExtent.width;
            imageInfo.extent.height = imageExtent.height;
            imageInfo.extent.depth = 1;
            imageInfo.mipLevels = 1;
            imageInfo.arrayLayers = 1;
            imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
            imageInfo.tiling = memReq.usage == VMA_MEMORY_USAGE_GPU_ONLY ?
                VK_IMAGE_TILING_OPTIMAL :
                VK_IMAGE_TILING_LINEAR;
            imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
            switch(memReq.usage)
            {
            case VMA_MEMORY_USAGE_GPU_ONLY:
                switch(localRand.Generate() % 3)
                {
                case 0:
                    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 1:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
                    break;
                case 2:
                    imageInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                    break;
                }
                break;
            case VMA_MEMORY_USAGE_CPU_ONLY:
            case VMA_MEMORY_USAGE_CPU_TO_GPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
                break;
            case VMA_MEMORY_USAGE_GPU_TO_CPU:
                imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
                break;
            }
            imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
            imageInfo.flags = 0;

            {
                AllocationTimeRegisterObj timeRegisterObj{outResult};
                res = vmaCreateImage(g_hAllocator, &imageInfo, &memReq, &allocation.Image, &allocation.Alloc, &allocationInfo);
            }
        }

        if(res == VK_SUCCESS)
        {
            ++allocationCount;
            totalAllocatedBytes += allocationInfo.size;
            bool useCommonAllocations = localRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
            if(useCommonAllocations)
            {
                std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                commonAllocations.push_back(allocation);
            }
            else
                allocations.push_back(allocation);
        }
        else
        {
            TEST(0);
        }
        return res;
    };

    auto GetNextAllocationSize = [&](
        VkDeviceSize& outBufSize,
        VkExtent2D& outImageSize,
        RandomNumberGenerator& localRand)
    {
        outBufSize = 0;
        outImageSize = {0, 0};

        uint32_t allocSizeIndex = 0;
        uint32_t r = localRand.Generate() % allocationSizeProbabilitySum;
        while(r >= config.AllocationSizes[allocSizeIndex].Probability)
            r -= config.AllocationSizes[allocSizeIndex++].Probability;

        const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
        if(allocSize.BufferSizeMax > 0)
        {
            assert(allocSize.ImageSizeMax == 0);
            if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
                outBufSize = allocSize.BufferSizeMin;
            else
            {
                outBufSize = allocSize.BufferSizeMin + localRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
                outBufSize = outBufSize / 16 * 16;
            }
        }
        else
        {
            if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
                outImageSize.width = outImageSize.height = allocSize.ImageSizeMax;
            else
            {
                outImageSize.width = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
                outImageSize.height = allocSize.ImageSizeMin + localRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
            }
        }
    };

    std::atomic<uint32_t> numThreadsReachedMaxAllocations = 0;
    HANDLE threadsFinishEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    auto ThreadProc = [&](uint32_t randSeed) -> void
    {
        RandomNumberGenerator threadRand(randSeed);
        VkDeviceSize threadTotalAllocatedBytes = 0;
        std::vector<Allocation> threadAllocations;
        VkDeviceSize threadBeginBytesToAllocate = config.BeginBytesToAllocate / config.ThreadCount;
        VkDeviceSize threadMaxBytesToAllocate = config.MaxBytesToAllocate / config.ThreadCount;
        uint32_t threadAdditionalOperationCount = config.AdditionalOperationCount / config.ThreadCount;

        // BEGIN ALLOCATIONS
        for(;;)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);
            if(threadTotalAllocatedBytes + bufferSize + imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                threadBeginBytesToAllocate)
            {
                if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                    break;
            }
            else
                break;
        }

        // ADDITIONAL ALLOCATIONS AND FREES
        for(size_t i = 0; i < threadAdditionalOperationCount; ++i)
        {
            VkDeviceSize bufferSize = 0;
            VkExtent2D imageExtent = {};
            GetNextAllocationSize(bufferSize, imageExtent, threadRand);

            // true = allocate, false = free
            bool allocate = threadRand.Generate() % 2 != 0;

            if(allocate)
            {
                if(threadTotalAllocatedBytes +
                    bufferSize +
                    imageExtent.width * imageExtent.height * IMAGE_BYTES_PER_PIXEL <
                    threadMaxBytesToAllocate)
                {
                    if(Allocate(bufferSize, imageExtent, threadRand, threadTotalAllocatedBytes, threadAllocations) != VK_SUCCESS)
                        break;
                }
            }
            else
            {
                bool useCommonAllocations = threadRand.Generate() % 100 < config.ThreadsUsingCommonAllocationsProbabilityPercent;
                if(useCommonAllocations)
                {
                    std::unique_lock<std::mutex> lock(commonAllocationsMutex);
                    if(!commonAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % commonAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, commonAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            commonAllocations.erase(commonAllocations.begin() + indexToFree);
                        }
                    }
                }
                else
                {
                    if(!threadAllocations.empty())
                    {
                        size_t indexToFree = threadRand.Generate() % threadAllocations.size();
                        VmaAllocationInfo allocationInfo;
                        vmaGetAllocationInfo(g_hAllocator, threadAllocations[indexToFree].Alloc, &allocationInfo);
                        if(threadTotalAllocatedBytes >= allocationInfo.size)
                        {
                            DeallocationTimeRegisterObj timeRegisterObj{outResult};
                            if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                                vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                            else
                                vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
                            threadTotalAllocatedBytes -= allocationInfo.size;
                            threadAllocations.erase(threadAllocations.begin() + indexToFree);
                        }
                    }
                }
            }
        }

        ++numThreadsReachedMaxAllocations;

        WaitForSingleObject(threadsFinishEvent, INFINITE);

        // DEALLOCATION
        while(!threadAllocations.empty())
        {
            size_t indexToFree = 0;
            switch(config.FreeOrder)
            {
            case FREE_ORDER::FORWARD:
                indexToFree = 0;
                break;
            case FREE_ORDER::BACKWARD:
                indexToFree = threadAllocations.size() - 1;
                break;
            case FREE_ORDER::RANDOM:
                indexToFree = mainRand.Generate() % threadAllocations.size();
                break;
            }

            {
                DeallocationTimeRegisterObj timeRegisterObj{outResult};
                if(threadAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                    vmaDestroyBuffer(g_hAllocator, threadAllocations[indexToFree].Buffer, threadAllocations[indexToFree].Alloc);
                else
                    vmaDestroyImage(g_hAllocator, threadAllocations[indexToFree].Image, threadAllocations[indexToFree].Alloc);
            }
            threadAllocations.erase(threadAllocations.begin() + indexToFree);
        }
    };

    uint32_t threadRandSeed = mainRand.Generate();
    std::vector<std::thread> bkgThreads;
    for(size_t i = 0; i < config.ThreadCount; ++i)
    {
        bkgThreads.emplace_back(std::bind(ThreadProc, threadRandSeed + (uint32_t)i));
    }

    // Wait until all threads have reached their maximum number of allocations.
    while(numThreadsReachedMaxAllocations < config.ThreadCount)
        Sleep(0);

    // CALCULATE MEMORY STATISTICS ON FINAL USAGE
    VmaStats vmaStats = {};
    vmaCalculateStats(g_hAllocator, &vmaStats);
    outResult.TotalMemoryAllocated = vmaStats.total.usedBytes + vmaStats.total.unusedBytes;
    outResult.FreeRangeSizeMax = vmaStats.total.unusedRangeSizeMax;
    outResult.FreeRangeSizeAvg = vmaStats.total.unusedRangeSizeAvg;

    // Signal threads to deallocate
    SetEvent(threadsFinishEvent);

    // Wait for threads to finish.
    for(size_t i = 0; i < bkgThreads.size(); ++i)
        bkgThreads[i].join();
    bkgThreads.clear();

    CloseHandle(threadsFinishEvent);

    // Deallocate remaining common resources
    while(!commonAllocations.empty())
    {
        size_t indexToFree = 0;
        switch(config.FreeOrder)
        {
        case FREE_ORDER::FORWARD:
            indexToFree = 0;
            break;
        case FREE_ORDER::BACKWARD:
            indexToFree = commonAllocations.size() - 1;
            break;
        case FREE_ORDER::RANDOM:
            indexToFree = mainRand.Generate() % commonAllocations.size();
            break;
        }

        {
            DeallocationTimeRegisterObj timeRegisterObj{outResult};
            if(commonAllocations[indexToFree].Buffer != VK_NULL_HANDLE)
                vmaDestroyBuffer(g_hAllocator, commonAllocations[indexToFree].Buffer, commonAllocations[indexToFree].Alloc);
            else
                vmaDestroyImage(g_hAllocator, commonAllocations[indexToFree].Image, commonAllocations[indexToFree].Alloc);
        }
        commonAllocations.erase(commonAllocations.begin() + indexToFree);
    }

    if(allocationCount)
    {
        outResult.AllocationTimeAvg /= allocationCount;
        outResult.DeallocationTimeAvg /= allocationCount;
    }

    outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;

    return res;
}
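
// Illustrative sketch (assumption, not original code): MainTest() is typically
// driven with a filled Config and an output Result, for example:
//
//     Config config = MakeExampleConfig(); // See the Config sketch above.
//     Result result = {};
//     VkResult res = MainTest(result, config);
//     TEST(res == VK_SUCCESS);
//     wprintf(L"Total time: %.2f ms\n",
//         std::chrono::duration_cast<std::chrono::duration<double, std::milli>>(result.TotalTime).count());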

void SaveAllocatorStatsToFile(const wchar_t* filePath)
{
    wprintf(L"Saving JSON dump to file \"%s\"\n", filePath);
    char* stats;
    vmaBuildStatsString(g_hAllocator, &stats, VK_TRUE);
    SaveFile(filePath, stats, strlen(stats));
    vmaFreeStatsString(g_hAllocator, stats);
}

struct AllocInfo
{
    VmaAllocation m_Allocation = VK_NULL_HANDLE;
    VkBuffer m_Buffer = VK_NULL_HANDLE;
    VkImage m_Image = VK_NULL_HANDLE;
    VkImageLayout m_ImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    uint32_t m_StartValue = 0;
    union
    {
        VkBufferCreateInfo m_BufferInfo;
        VkImageCreateInfo m_ImageInfo;
    };

    // After defragmentation.
    VkBuffer m_NewBuffer = VK_NULL_HANDLE;
    VkImage m_NewImage = VK_NULL_HANDLE;

    void CreateBuffer(
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo);
    void CreateImage(
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VkImageLayout layout);
    void Destroy();
};

void AllocInfo::CreateBuffer(
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo)
{
    m_BufferInfo = bufCreateInfo;
    VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &m_Buffer, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}
void AllocInfo::CreateImage(
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VkImageLayout layout)
{
    m_ImageInfo = imageCreateInfo;
    m_ImageLayout = layout;
    VkResult res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &m_Image, &m_Allocation, nullptr);
    TEST(res == VK_SUCCESS);
}

void AllocInfo::Destroy()
{
    if(m_Image)
    {
        assert(!m_Buffer);
        vkDestroyImage(g_hDevice, m_Image, g_Allocs);
        m_Image = VK_NULL_HANDLE;
    }
    if(m_Buffer)
    {
        assert(!m_Image);
        vkDestroyBuffer(g_hDevice, m_Buffer, g_Allocs);
        m_Buffer = VK_NULL_HANDLE;
    }
    if(m_Allocation)
    {
        vmaFreeMemory(g_hAllocator, m_Allocation);
        m_Allocation = VK_NULL_HANDLE;
    }
}

class StagingBufferCollection
{
public:
    StagingBufferCollection() { }
    ~StagingBufferCollection();
    // Returns false if maximum total size of buffers would be exceeded.
    bool AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr);
    void ReleaseAllBuffers();

private:
    static const VkDeviceSize MAX_TOTAL_SIZE = 256ull * 1024 * 1024;
    struct BufInfo
    {
        VmaAllocation Allocation = VK_NULL_HANDLE;
        VkBuffer Buffer = VK_NULL_HANDLE;
        VkDeviceSize Size = VK_WHOLE_SIZE;
        void* MappedPtr = nullptr;
        bool Used = false;
    };
    std::vector<BufInfo> m_Bufs;
    // Including both used and unused.
    VkDeviceSize m_TotalSize = 0;
};

StagingBufferCollection::~StagingBufferCollection()
{
    for(size_t i = m_Bufs.size(); i--; )
    {
        vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
    }
}

bool StagingBufferCollection::AcquireBuffer(VkDeviceSize size, VkBuffer& outBuffer, void*& outMappedPtr)
{
    assert(size <= MAX_TOTAL_SIZE);

    // Try to find existing unused buffer with best size.
    size_t bestIndex = SIZE_MAX;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        BufInfo& currBufInfo = m_Bufs[i];
        if(!currBufInfo.Used && currBufInfo.Size >= size &&
            (bestIndex == SIZE_MAX || currBufInfo.Size < m_Bufs[bestIndex].Size))
        {
            bestIndex = i;
        }
    }

    if(bestIndex != SIZE_MAX)
    {
        m_Bufs[bestIndex].Used = true;
        outBuffer = m_Bufs[bestIndex].Buffer;
        outMappedPtr = m_Bufs[bestIndex].MappedPtr;
        return true;
    }

    // Allocate new buffer with requested size.
    if(m_TotalSize + size <= MAX_TOTAL_SIZE)
    {
        BufInfo bufInfo;
        bufInfo.Size = size;
        bufInfo.Used = true;

        VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufCreateInfo.size = size;
        bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

        VmaAllocationCreateInfo allocCreateInfo = {};
        allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

        VmaAllocationInfo allocInfo;
        VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
        bufInfo.MappedPtr = allocInfo.pMappedData;
        TEST(res == VK_SUCCESS && bufInfo.MappedPtr);

        outBuffer = bufInfo.Buffer;
        outMappedPtr = bufInfo.MappedPtr;

        m_Bufs.push_back(std::move(bufInfo));

        m_TotalSize += size;

        return true;
    }

    // There are some unused but smaller buffers: Free them and try again.
    bool hasUnused = false;
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        if(!m_Bufs[i].Used)
        {
            hasUnused = true;
            break;
        }
    }
    if(hasUnused)
    {
        for(size_t i = m_Bufs.size(); i--; )
        {
            if(!m_Bufs[i].Used)
            {
                m_TotalSize -= m_Bufs[i].Size;
                vmaDestroyBuffer(g_hAllocator, m_Bufs[i].Buffer, m_Bufs[i].Allocation);
                m_Bufs.erase(m_Bufs.begin() + i);
            }
        }

        return AcquireBuffer(size, outBuffer, outMappedPtr);
    }

    return false;
}

void StagingBufferCollection::ReleaseAllBuffers()
{
    for(size_t i = 0, count = m_Bufs.size(); i < count; ++i)
    {
        m_Bufs[i].Used = false;
    }
}
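
// Illustrative sketch (assumption, not original code): the intended usage pattern,
// as followed by UploadGpuData() and ValidateGpuData() below, is acquire -> record
// copy -> submit -> release:
//
//     StagingBufferCollection stagingBufs;
//     VkBuffer stagingBuf = VK_NULL_HANDLE;
//     void* mappedPtr = nullptr;
//     if(!stagingBufs.AcquireBuffer(dataSize, stagingBuf, mappedPtr))
//     {
//         // Out of staging space: submit pending copies, then recycle the buffers.
//         EndSingleTimeCommands();
//         stagingBufs.ReleaseAllBuffers();
//         TEST(stagingBufs.AcquireBuffer(dataSize, stagingBuf, mappedPtr));
//     }
//     memcpy(mappedPtr, srcData, (size_t)dataSize); // srcData/dataSize are hypothetical.
//     // ... record vkCmdCopyBuffer from stagingBuf into the destination resource ...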

static void UploadGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Buffer, 1, &copy);
        }
        else
        {
            TEST(currAllocInfo.m_ImageInfo.format == VK_FORMAT_R8G8B8A8_UNORM && "Only RGBA8 images are currently supported.");
            TEST(currAllocInfo.m_ImageInfo.mipLevels == 1 && "Only single mip images are currently supported.");

            const VkDeviceSize size = (VkDeviceSize)currAllocInfo.m_ImageInfo.extent.width * currAllocInfo.m_ImageInfo.extent.height * sizeof(uint32_t);

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                stagingBufs.ReleaseAllBuffers();
                cmdBufferStarted = false;

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Fill staging buffer.
            {
                assert(size % sizeof(uint32_t) == 0);
                uint32_t* stagingValPtr = (uint32_t*)stagingBufMappedPtr;
                uint32_t val = currAllocInfo.m_StartValue;
                for(size_t i = 0; i < size / sizeof(uint32_t); ++i)
                {
                    *stagingValPtr = val;
                    ++stagingValPtr;
                    ++val;
                }
            }

            // Issue copy command from staging buffer to destination image.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            // Transition to transfer dst layout.
            VkImageSubresourceRange subresourceRange = {
                VK_IMAGE_ASPECT_COLOR_BIT,
                0, VK_REMAINING_MIP_LEVELS,
                0, VK_REMAINING_ARRAY_LAYERS
            };

            VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
            barrier.srcAccessMask = 0;
            barrier.dstAccessMask = 0;
            barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
            barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
            barrier.image = currAllocInfo.m_Image;
            barrier.subresourceRange = subresourceRange;

            vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
                0, nullptr,
                0, nullptr,
                1, &barrier);

            // Copy image data.
            VkBufferImageCopy copy = {};
            copy.bufferOffset = 0;
            copy.bufferRowLength = 0;
            copy.bufferImageHeight = 0;
            copy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            copy.imageSubresource.layerCount = 1;
            copy.imageExtent = currAllocInfo.m_ImageInfo.extent;

            vkCmdCopyBufferToImage(g_hTemporaryCommandBuffer, stagingBuf, currAllocInfo.m_Image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);

            // Transition to the desired layout.
            barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
            barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
            barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
            barrier.newLayout = currAllocInfo.m_ImageLayout;

            vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
                0, nullptr,
                0, nullptr,
                1, &barrier);
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();
        stagingBufs.ReleaseAllBuffers();
    }
}

static void ValidateGpuData(const AllocInfo* allocInfo, size_t allocInfoCount)
{
    StagingBufferCollection stagingBufs;

    bool cmdBufferStarted = false;
    size_t validateAllocIndexOffset = 0;
    std::vector<void*> validateStagingBuffers;
    for(size_t allocInfoIndex = 0; allocInfoIndex < allocInfoCount; ++allocInfoIndex)
    {
        const AllocInfo& currAllocInfo = allocInfo[allocInfoIndex];
        if(currAllocInfo.m_Buffer)
        {
            const VkDeviceSize size = currAllocInfo.m_BufferInfo.size;

            VkBuffer stagingBuf = VK_NULL_HANDLE;
            void* stagingBufMappedPtr = nullptr;
            if(!stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr))
            {
                TEST(cmdBufferStarted);
                EndSingleTimeCommands();
                cmdBufferStarted = false;

                for(size_t validateIndex = 0;
                    validateIndex < validateStagingBuffers.size();
                    ++validateIndex)
                {
                    const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
                    const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
                    TEST(validateSize % sizeof(uint32_t) == 0);
                    const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
                    uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
                    bool valid = true;
                    for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
                    {
                        if(*stagingValPtr != val)
                        {
                            valid = false;
                            break;
                        }
                        ++stagingValPtr;
                        ++val;
                    }
                    TEST(valid);
                }

                stagingBufs.ReleaseAllBuffers();

                validateAllocIndexOffset = allocInfoIndex;
                validateStagingBuffers.clear();

                bool ok = stagingBufs.AcquireBuffer(size, stagingBuf, stagingBufMappedPtr);
                TEST(ok);
            }

            // Issue copy command from destination buffer back to staging buffer.
            if(!cmdBufferStarted)
            {
                cmdBufferStarted = true;
                BeginSingleTimeCommands();
            }

            VkBufferCopy copy = {};
            copy.srcOffset = 0;
            copy.dstOffset = 0;
            copy.size = size;
            vkCmdCopyBuffer(g_hTemporaryCommandBuffer, currAllocInfo.m_Buffer, stagingBuf, 1, &copy);

            // Save mapped pointer for later validation.
            validateStagingBuffers.push_back(stagingBufMappedPtr);
        }
        else
        {
            TEST(0 && "Images not currently supported.");
        }
    }

    if(cmdBufferStarted)
    {
        EndSingleTimeCommands();

        for(size_t validateIndex = 0;
            validateIndex < validateStagingBuffers.size();
            ++validateIndex)
        {
            const size_t validateAllocIndex = validateIndex + validateAllocIndexOffset;
            const VkDeviceSize validateSize = allocInfo[validateAllocIndex].m_BufferInfo.size;
            TEST(validateSize % sizeof(uint32_t) == 0);
            const uint32_t* stagingValPtr = (const uint32_t*)validateStagingBuffers[validateIndex];
            uint32_t val = allocInfo[validateAllocIndex].m_StartValue;
            bool valid = true;
            for(size_t i = 0; i < validateSize / sizeof(uint32_t); ++i)
            {
                if(*stagingValPtr != val)
                {
                    valid = false;
                    break;
                }
                ++stagingValPtr;
                ++val;
            }
            TEST(valid);
        }

        stagingBufs.ReleaseAllBuffers();
    }
}

static void GetMemReq(VmaAllocationCreateInfo& outMemReq)
{
    outMemReq = {};
    outMemReq.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    //outMemReq.flags = VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT;
}

static void CreateBuffer(
    VmaPool pool,
    const VkBufferCreateInfo& bufCreateInfo,
    bool persistentlyMapped,
    AllocInfo& outAllocInfo)
{
    outAllocInfo = {};
    outAllocInfo.m_BufferInfo = bufCreateInfo;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = pool;
    if(persistentlyMapped)
        allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;

    VmaAllocationInfo vmaAllocInfo = {};
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &outAllocInfo.m_Buffer, &outAllocInfo.m_Allocation, &vmaAllocInfo) );

    // Setup StartValue and fill.
    {
        outAllocInfo.m_StartValue = (uint32_t)rand();
        uint32_t* data = (uint32_t*)vmaAllocInfo.pMappedData;
        TEST((data != nullptr) == persistentlyMapped);
        if(!persistentlyMapped)
        {
            ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, outAllocInfo.m_Allocation, (void**)&data) );
        }

        uint32_t value = outAllocInfo.m_StartValue;
        TEST(bufCreateInfo.size % 4 == 0);
        for(size_t i = 0; i < bufCreateInfo.size / sizeof(uint32_t); ++i)
            data[i] = value++;

        if(!persistentlyMapped)
            vmaUnmapMemory(g_hAllocator, outAllocInfo.m_Allocation);
    }
}

static void CreateAllocation(AllocInfo& outAllocation)
{
    outAllocation.m_Allocation = nullptr;
    outAllocation.m_Buffer = nullptr;
    outAllocation.m_Image = nullptr;
    outAllocation.m_StartValue = (uint32_t)rand();

    VmaAllocationCreateInfo vmaMemReq;
    GetMemReq(vmaMemReq);

    VmaAllocationInfo allocInfo;

    const bool isBuffer = true;//(rand() & 0x1) != 0;
    const bool isLarge = (rand() % 16) == 0;
    if(isBuffer)
    {
        const uint32_t bufferSize = isLarge ?
            (rand() % 10 + 1) * (1024 * 1024) : // 1 MB ... 10 MB
            (rand() % 1024 + 1) * 1024; // 1 KB ... 1 MB

        VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
        bufferInfo.size = bufferSize;
        bufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &vmaMemReq, &outAllocation.m_Buffer, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_BufferInfo = bufferInfo;
        TEST(res == VK_SUCCESS);
    }
    else
    {
        const uint32_t imageSizeX = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024
        const uint32_t imageSizeY = isLarge ?
            1024 + rand() % (4096 - 1024) : // 1024 ... 4096
            rand() % 1024 + 1; // 1 ... 1024

        VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
        imageInfo.imageType = VK_IMAGE_TYPE_2D;
        imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
        imageInfo.extent.width = imageSizeX;
        imageInfo.extent.height = imageSizeY;
        imageInfo.extent.depth = 1;
        imageInfo.mipLevels = 1;
        imageInfo.arrayLayers = 1;
        imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
        imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
        imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;

        VkResult res = vmaCreateImage(g_hAllocator, &imageInfo, &vmaMemReq, &outAllocation.m_Image, &outAllocation.m_Allocation, &allocInfo);
        outAllocation.m_ImageInfo = imageInfo;
        TEST(res == VK_SUCCESS);
    }

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, outAllocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = outAllocation.m_StartValue;
    TEST(allocInfo.size % 4 == 0);
    for(size_t i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
        data[i] = value++;

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, outAllocation.m_Allocation);
}

static void DestroyAllocation(const AllocInfo& allocation)
{
    if(allocation.m_Buffer)
        vmaDestroyBuffer(g_hAllocator, allocation.m_Buffer, allocation.m_Allocation);
    else
        vmaDestroyImage(g_hAllocator, allocation.m_Image, allocation.m_Allocation);
}

static void DestroyAllAllocations(std::vector<AllocInfo>& allocations)
{
    for(size_t i = allocations.size(); i--; )
        DestroyAllocation(allocations[i]);
    allocations.clear();
}

static void ValidateAllocationData(const AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    uint32_t* data = (uint32_t*)allocInfo.pMappedData;
    if(allocInfo.pMappedData == nullptr)
    {
        VkResult res = vmaMapMemory(g_hAllocator, allocation.m_Allocation, (void**)&data);
        TEST(res == VK_SUCCESS);
    }

    uint32_t value = allocation.m_StartValue;
    bool ok = true;
    size_t i;
    TEST(allocInfo.size % 4 == 0);
    for(i = 0; i < allocInfo.size / sizeof(uint32_t); ++i)
    {
        if(data[i] != value++)
        {
            ok = false;
            break;
        }
    }
    TEST(ok);

    if(allocInfo.pMappedData == nullptr)
        vmaUnmapMemory(g_hAllocator, allocation.m_Allocation);
}

static void RecreateAllocationResource(AllocInfo& allocation)
{
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(g_hAllocator, allocation.m_Allocation, &allocInfo);

    if(allocation.m_Buffer)
    {
        vkDestroyBuffer(g_hDevice, allocation.m_Buffer, g_Allocs);

        VkResult res = vkCreateBuffer(g_hDevice, &allocation.m_BufferInfo, g_Allocs, &allocation.m_Buffer);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetBufferMemoryRequirements(g_hDevice, allocation.m_Buffer, &vkMemReq);
        TEST(vkMemReq.size >= allocation.m_BufferInfo.size);

        res = vmaBindBufferMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Buffer);
        TEST(res == VK_SUCCESS);
    }
    else
    {
        vkDestroyImage(g_hDevice, allocation.m_Image, g_Allocs);

        VkResult res = vkCreateImage(g_hDevice, &allocation.m_ImageInfo, g_Allocs, &allocation.m_Image);
        TEST(res == VK_SUCCESS);

        // Just to silence validation layer warnings.
        VkMemoryRequirements vkMemReq;
        vkGetImageMemoryRequirements(g_hDevice, allocation.m_Image, &vkMemReq);

        res = vmaBindImageMemory(g_hAllocator, allocation.m_Allocation, allocation.m_Image);
        TEST(res == VK_SUCCESS);
    }
}

static void Defragment(AllocInfo* allocs, size_t allocCount,
    const VmaDefragmentationInfo* defragmentationInfo = nullptr,
    VmaDefragmentationStats* defragmentationStats = nullptr)
{
    std::vector<VmaAllocation> vmaAllocs(allocCount);
    for(size_t i = 0; i < allocCount; ++i)
        vmaAllocs[i] = allocs[i].m_Allocation;

    std::vector<VkBool32> allocChanged(allocCount);

    ERR_GUARD_VULKAN( vmaDefragment(g_hAllocator, vmaAllocs.data(), allocCount, allocChanged.data(),
        defragmentationInfo, defragmentationStats) );

    for(size_t i = 0; i < allocCount; ++i)
    {
        if(allocChanged[i])
        {
            RecreateAllocationResource(allocs[i]);
        }
    }
}
|
| 1347 |
|
| 1348 | static void ValidateAllocationsData(const AllocInfo* allocs, size_t allocCount)
|
| 1349 | {
|
| 1350 | std::for_each(allocs, allocs + allocCount, [](const AllocInfo& allocInfo) {
|
| 1351 | ValidateAllocationData(allocInfo);
|
| 1352 | });
|
| 1353 | }
|
| 1354 |
|
| 1355 | void TestDefragmentationSimple()
|
| 1356 | {
|
| 1357 | wprintf(L"Test defragmentation simple\n");
|
| 1358 |
|
| 1359 | RandomNumberGenerator rand(667);
|
| 1360 |
|
| 1361 | const VkDeviceSize BUF_SIZE = 0x10000;
|
| 1362 | const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
|
| 1363 |
|
| 1364 | const VkDeviceSize MIN_BUF_SIZE = 32;
|
| 1365 | const VkDeviceSize MAX_BUF_SIZE = BUF_SIZE * 4;
|
| 1366 | auto RandomBufSize = [&]() -> VkDeviceSize {
|
| 1367 | return align_up<VkDeviceSize>(rand.Generate() % (MAX_BUF_SIZE - MIN_BUF_SIZE + 1) + MIN_BUF_SIZE, 32);
|
| 1368 | };
|
| 1369 |
|
| 1370 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 1371 | bufCreateInfo.size = BUF_SIZE;
|
| 1372 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 1373 |
|
| 1374 | VmaAllocationCreateInfo exampleAllocCreateInfo = {};
|
| 1375 | exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 1376 |
|
| 1377 | uint32_t memTypeIndex = UINT32_MAX;
|
| 1378 | vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
|
| 1379 |
|
| 1380 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 1381 | poolCreateInfo.blockSize = BLOCK_SIZE;
|
| 1382 | poolCreateInfo.memoryTypeIndex = memTypeIndex;
|
| 1383 |
|
| 1384 | VmaPool pool;
|
| 1385 | ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
|
| 1386 |
|
Adam Sawicki | e168191 | 2018-11-23 17:50:12 +0100 | [diff] [blame] | 1387 | // Defragmentation of empty pool.
|
| 1388 | {
|
| 1389 | VmaDefragmentationInfo2 defragInfo = {};
|
| 1390 | defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
|
| 1391 | defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
|
| 1392 | defragInfo.poolCount = 1;
|
| 1393 | defragInfo.pPools = &pool;
|
| 1394 |
|
| 1395 | VmaDefragmentationStats defragStats = {};
|
| 1396 | VmaDefragmentationContext defragCtx = nullptr;
|
| 1397 | VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats, &defragCtx);
|
| 1398 | TEST(res >= VK_SUCCESS);
|
| 1399 | vmaDefragmentationEnd(g_hAllocator, defragCtx);
|
| 1400 | TEST(defragStats.allocationsMoved == 0 && defragStats.bytesFreed == 0 &&
|
| 1401 | defragStats.bytesMoved == 0 && defragStats.deviceMemoryBlocksFreed == 0);
|
| 1402 | }
|
| 1403 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1404 | std::vector<AllocInfo> allocations;
|
| 1405 |
|
| 1406 | // persistentlyMappedOption = 0 - not persistently mapped.
|
| 1407 | // persistentlyMappedOption = 1 - persistently mapped.
|
| 1408 | for(uint32_t persistentlyMappedOption = 0; persistentlyMappedOption < 2; ++persistentlyMappedOption)
|
| 1409 | {
|
| 1410 | wprintf(L" Persistently mapped option = %u\n", persistentlyMappedOption);
|
| 1411 | const bool persistentlyMapped = persistentlyMappedOption != 0;
|
| 1412 |
|
| 1413 | // # Test 1
|
| 1414 | // Buffers of fixed size.
|
| 1415 | // Fill 2 blocks. Remove odd buffers. Defragment everything.
|
| 1416 | // Expected result: at least 1 block freed.
|
| 1417 | {
|
| 1418 | for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
|
| 1419 | {
|
| 1420 | AllocInfo allocInfo;
|
| 1421 | CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
|
| 1422 | allocations.push_back(allocInfo);
|
| 1423 | }
|
| 1424 |
|
| 1425 | for(size_t i = 1; i < allocations.size(); ++i)
|
| 1426 | {
|
| 1427 | DestroyAllocation(allocations[i]);
|
| 1428 | allocations.erase(allocations.begin() + i);
|
| 1429 | }
|
| 1430 |
|
| 1431 | VmaDefragmentationStats defragStats;
|
| 1432 | Defragment(allocations.data(), allocations.size(), nullptr, &defragStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 1433 | TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
|
| 1434 | TEST(defragStats.deviceMemoryBlocksFreed >= 1);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1435 |
|
| 1436 | ValidateAllocationsData(allocations.data(), allocations.size());
|
| 1437 |
|
| 1438 | DestroyAllAllocations(allocations);
|
| 1439 | }
|
| 1440 |
|
| 1441 | // # Test 2
|
| 1442 | // Buffers of fixed size.
|
| 1443 | // Fill 2 blocks. Remove odd buffers. Defragment one buffer at a time.
|
| 1444 | // Expected result: Each of 4 iterations makes some progress.
|
| 1445 | {
|
| 1446 | for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
|
| 1447 | {
|
| 1448 | AllocInfo allocInfo;
|
| 1449 | CreateBuffer(pool, bufCreateInfo, persistentlyMapped, allocInfo);
|
| 1450 | allocations.push_back(allocInfo);
|
| 1451 | }
|
| 1452 |
|
| 1453 | for(size_t i = 1; i < allocations.size(); ++i)
|
| 1454 | {
|
| 1455 | DestroyAllocation(allocations[i]);
|
| 1456 | allocations.erase(allocations.begin() + i);
|
| 1457 | }
|
| 1458 |
|
| 1459 | VmaDefragmentationInfo defragInfo = {};
|
| 1460 | defragInfo.maxAllocationsToMove = 1;
|
| 1461 | defragInfo.maxBytesToMove = BUF_SIZE;
|
| 1462 |
|
| 1463 | for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE / 2; ++i)
|
| 1464 | {
|
| 1465 | VmaDefragmentationStats defragStats;
|
| 1466 | Defragment(allocations.data(), allocations.size(), &defragInfo, &defragStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 1467 | TEST(defragStats.allocationsMoved > 0 && defragStats.bytesMoved > 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1468 | }
|
| 1469 |
|
| 1470 | ValidateAllocationsData(allocations.data(), allocations.size());
|
| 1471 |
|
| 1472 | DestroyAllAllocations(allocations);
|
| 1473 | }
|
| 1474 |
|
| 1475 | // # Test 3
|
| 1476 | // Buffers of variable size.
|
| 1477 | // Create a number of buffers. Remove some percent of them.
|
| 1478 | // Defragment while having some percent of them unmovable.
|
| 1479 | // Expected result: Just simple validation.
|
| 1480 | {
|
| 1481 | for(size_t i = 0; i < 100; ++i)
|
| 1482 | {
|
| 1483 | VkBufferCreateInfo localBufCreateInfo = bufCreateInfo;
|
| 1484 | localBufCreateInfo.size = RandomBufSize();
|
| 1485 |
|
| 1486 | AllocInfo allocInfo;
|
| 1487 | CreateBuffer(pool, localBufCreateInfo, persistentlyMapped, allocInfo);
|
| 1488 | allocations.push_back(allocInfo);
|
| 1489 | }
|
| 1490 |
|
| 1491 | const uint32_t percentToDelete = 60;
|
| 1492 | const size_t numberToDelete = allocations.size() * percentToDelete / 100;
|
| 1493 | for(size_t i = 0; i < numberToDelete; ++i)
|
| 1494 | {
|
| 1495 | size_t indexToDelete = rand.Generate() % (uint32_t)allocations.size();
|
| 1496 | DestroyAllocation(allocations[indexToDelete]);
|
| 1497 | allocations.erase(allocations.begin() + indexToDelete);
|
| 1498 | }
|
| 1499 |
|
| 1500 | // Non-movable allocations will be at the beginning of the allocations array.
|
| 1501 | const uint32_t percentNonMovable = 20;
|
| 1502 | const size_t numberNonMovable = allocations.size() * percentNonMovable / 100;
|
| 1503 | for(size_t i = 0; i < numberNonMovable; ++i)
|
| 1504 | {
|
| 1505 | size_t indexNonMovable = i + rand.Generate() % (uint32_t)(allocations.size() - i);
|
| 1506 | if(indexNonMovable != i)
|
| 1507 | std::swap(allocations[i], allocations[indexNonMovable]);
|
| 1508 | }
|
| 1509 |
|
| 1510 | VmaDefragmentationStats defragStats;
|
| 1511 | Defragment(
|
| 1512 | allocations.data() + numberNonMovable,
|
| 1513 | allocations.size() - numberNonMovable,
|
| 1514 | nullptr, &defragStats);
|
| 1515 |
|
| 1516 | ValidateAllocationsData(allocations.data(), allocations.size());
|
| 1517 |
|
| 1518 | DestroyAllAllocations(allocations);
|
| 1519 | }
|
| 1520 | }
|
| 1521 |
|
Adam Sawicki | 647cf24 | 2018-11-23 17:58:00 +0100 | [diff] [blame] | 1522 | /*
|
| 1523 | Allocation that must be moved to an overlapping place using memmove().
|
| 1524 | Create 2 buffers, the second slightly bigger than the first. Delete the first. Then defragment.
|
| 1525 | */
|
Adam Sawicki | bdb89a9 | 2018-12-13 11:56:30 +0100 | [diff] [blame] | 1526 | if(VMA_DEBUG_MARGIN == 0) // FAST algorithm works only when DEBUG_MARGIN is disabled.
|
Adam Sawicki | 647cf24 | 2018-11-23 17:58:00 +0100 | [diff] [blame] | 1527 | {
|
| 1528 | AllocInfo allocInfo[2];
|
| 1529 |
|
| 1530 | bufCreateInfo.size = BUF_SIZE;
|
| 1531 | CreateBuffer(pool, bufCreateInfo, false, allocInfo[0]);
|
| 1532 | const VkDeviceSize biggerBufSize = BUF_SIZE + BUF_SIZE / 256;
|
| 1533 | bufCreateInfo.size = biggerBufSize;
|
| 1534 | CreateBuffer(pool, bufCreateInfo, false, allocInfo[1]);
|
| 1535 |
|
| 1536 | DestroyAllocation(allocInfo[0]);
|
| 1537 |
|
| 1538 | VmaDefragmentationStats defragStats;
|
| 1539 | Defragment(&allocInfo[1], 1, nullptr, &defragStats);
|
| 1540 | // If this fails, it means we couldn't do memmove with overlapping regions.
|
| 1541 | TEST(defragStats.allocationsMoved == 1 && defragStats.bytesMoved > 0);
|
| 1542 |
|
| 1543 | ValidateAllocationsData(&allocInfo[1], 1);
|
| 1544 | DestroyAllocation(allocInfo[1]);
|
| 1545 | }
|
| 1546 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1547 | vmaDestroyPool(g_hAllocator, pool);
|
| 1548 | }
|
| 1549 |
|
Adam Sawicki | 52076eb | 2018-11-22 16:14:50 +0100 | [diff] [blame] | 1550 | void TestDefragmentationWholePool()
|
| 1551 | {
|
| 1552 | wprintf(L"Test defragmentation whole pool\n");
|
| 1553 |
|
| 1554 | RandomNumberGenerator rand(668);
|
| 1555 |
|
| 1556 | const VkDeviceSize BUF_SIZE = 0x10000;
|
| 1557 | const VkDeviceSize BLOCK_SIZE = BUF_SIZE * 8;
|
| 1558 |
|
| 1559 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 1560 | bufCreateInfo.size = BUF_SIZE;
|
| 1561 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 1562 |
|
| 1563 | VmaAllocationCreateInfo exampleAllocCreateInfo = {};
|
| 1564 | exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 1565 |
|
| 1566 | uint32_t memTypeIndex = UINT32_MAX;
|
| 1567 | vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex);
|
| 1568 |
|
| 1569 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 1570 | poolCreateInfo.blockSize = BLOCK_SIZE;
|
| 1571 | poolCreateInfo.memoryTypeIndex = memTypeIndex;
|
| 1572 |
|
| 1573 | VmaDefragmentationStats defragStats[2];
|
| 1574 | for(size_t caseIndex = 0; caseIndex < 2; ++caseIndex)
|
| 1575 | {
|
| 1576 | VmaPool pool;
|
| 1577 | ERR_GUARD_VULKAN( vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool) );
|
| 1578 |
|
| 1579 | std::vector<AllocInfo> allocations;
|
| 1580 |
|
| 1581 | // Buffers of fixed size.
|
| 1582 | // Fill 2 blocks. Remove odd buffers. Defragment all of them.
|
| 1583 | for(size_t i = 0; i < BLOCK_SIZE / BUF_SIZE * 2; ++i)
|
| 1584 | {
|
| 1585 | AllocInfo allocInfo;
|
| 1586 | CreateBuffer(pool, bufCreateInfo, false, allocInfo);
|
| 1587 | allocations.push_back(allocInfo);
|
| 1588 | }
|
| 1589 |
|
| 1590 | for(size_t i = 1; i < allocations.size(); ++i)
|
| 1591 | {
|
| 1592 | DestroyAllocation(allocations[i]);
|
| 1593 | allocations.erase(allocations.begin() + i);
|
| 1594 | }
|
| 1595 |
|
| 1596 | VmaDefragmentationInfo2 defragInfo = {};
|
| 1597 | defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
|
| 1598 | defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
|
| 1599 | std::vector<VmaAllocation> allocationsToDefrag;
|
| 1600 | if(caseIndex == 0)
|
| 1601 | {
|
| 1602 | defragInfo.poolCount = 1;
|
| 1603 | defragInfo.pPools = &pool;
|
| 1604 | }
|
| 1605 | else
|
| 1606 | {
|
| 1607 | const size_t allocCount = allocations.size();
|
| 1608 | allocationsToDefrag.resize(allocCount);
|
| 1609 | std::transform(
|
| 1610 | allocations.begin(), allocations.end(),
|
| 1611 | allocationsToDefrag.begin(),
|
| 1612 | [](const AllocInfo& allocInfo) { return allocInfo.m_Allocation; });
|
| 1613 | defragInfo.allocationCount = (uint32_t)allocCount;
|
| 1614 | defragInfo.pAllocations = allocationsToDefrag.data();
|
| 1615 | }
|
| 1616 |
|
| 1617 | VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
|
| 1618 | VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &defragStats[caseIndex], &defragCtx);
|
| 1619 | TEST(res >= VK_SUCCESS);
|
| 1620 | vmaDefragmentationEnd(g_hAllocator, defragCtx);
|
| 1621 |
|
| 1622 | TEST(defragStats[caseIndex].allocationsMoved > 0 && defragStats[caseIndex].bytesMoved > 0);
|
| 1623 |
|
| 1624 | ValidateAllocationsData(allocations.data(), allocations.size());
|
| 1625 |
|
| 1626 | DestroyAllAllocations(allocations);
|
| 1627 |
|
| 1628 | vmaDestroyPool(g_hAllocator, pool);
|
| 1629 | }
|
| 1630 |
|
| 1631 | TEST(defragStats[0].bytesMoved == defragStats[1].bytesMoved);
|
| 1632 | TEST(defragStats[0].allocationsMoved == defragStats[1].allocationsMoved);
|
| 1633 | TEST(defragStats[0].bytesFreed == defragStats[1].bytesFreed);
|
| 1634 | TEST(defragStats[0].deviceMemoryBlocksFreed == defragStats[1].deviceMemoryBlocksFreed);
|
| 1635 | }
|
| 1636 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1637 | void TestDefragmentationFull()
|
| 1638 | {
|
| 1639 | std::vector<AllocInfo> allocations;
|
| 1640 |
|
| 1641 | // Create initial allocations.
|
| 1642 | for(size_t i = 0; i < 400; ++i)
|
| 1643 | {
|
| 1644 | AllocInfo allocation;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1645 | CreateAllocation(allocation);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1646 | allocations.push_back(allocation);
|
| 1647 | }
|
| 1648 |
|
| 1649 | // Delete random allocations
|
| 1650 | const size_t allocationsToDeletePercent = 80;
|
| 1651 | size_t allocationsToDelete = allocations.size() * allocationsToDeletePercent / 100;
|
| 1652 | for(size_t i = 0; i < allocationsToDelete; ++i)
|
| 1653 | {
|
| 1654 | size_t index = (size_t)rand() % allocations.size();
|
| 1655 | DestroyAllocation(allocations[index]);
|
| 1656 | allocations.erase(allocations.begin() + index);
|
| 1657 | }
|
| 1658 |
|
| 1659 | for(size_t i = 0; i < allocations.size(); ++i)
|
| 1660 | ValidateAllocationData(allocations[i]);
|
| 1661 |
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 1662 | //SaveAllocatorStatsToFile(L"Before.csv");
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1663 |
|
| 1664 | {
|
| 1665 | std::vector<VmaAllocation> vmaAllocations(allocations.size());
|
| 1666 | for(size_t i = 0; i < allocations.size(); ++i)
|
| 1667 | vmaAllocations[i] = allocations[i].m_Allocation;
|
| 1668 |
|
| 1669 | const size_t nonMovablePercent = 0;
|
| 1670 | size_t nonMovableCount = vmaAllocations.size() * nonMovablePercent / 100;
|
| 1671 | for(size_t i = 0; i < nonMovableCount; ++i)
|
| 1672 | {
|
| 1673 | size_t index = (size_t)rand() % vmaAllocations.size();
|
| 1674 | vmaAllocations.erase(vmaAllocations.begin() + index);
|
| 1675 | }
|
| 1676 |
|
| 1677 | const uint32_t defragCount = 1;
|
| 1678 | for(uint32_t defragIndex = 0; defragIndex < defragCount; ++defragIndex)
|
| 1679 | {
|
| 1680 | std::vector<VkBool32> allocationsChanged(vmaAllocations.size());
|
| 1681 |
|
| 1682 | VmaDefragmentationInfo defragmentationInfo;
|
| 1683 | defragmentationInfo.maxAllocationsToMove = UINT_MAX;
|
| 1684 | defragmentationInfo.maxBytesToMove = SIZE_MAX;
|
| 1685 |
|
| 1686 | wprintf(L"Defragmentation #%u\n", defragIndex);
|
| 1687 |
|
| 1688 | time_point begTime = std::chrono::high_resolution_clock::now();
|
| 1689 |
|
| 1690 | VmaDefragmentationStats stats;
|
| 1691 | VkResult res = vmaDefragment(g_hAllocator, vmaAllocations.data(), vmaAllocations.size(), allocationsChanged.data(), &defragmentationInfo, &stats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 1692 | TEST(res >= 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1693 |
|
| 1694 | float defragmentDuration = ToFloatSeconds(std::chrono::high_resolution_clock::now() - begTime);
|
| 1695 |
|
| 1696 | wprintf(L"Moved allocations %u, bytes %llu\n", stats.allocationsMoved, stats.bytesMoved);
|
| 1697 | wprintf(L"Freed blocks %u, bytes %llu\n", stats.deviceMemoryBlocksFreed, stats.bytesFreed);
|
| 1698 | wprintf(L"Time: %.2f s\n", defragmentDuration);
|
| 1699 |
|
| 1700 | for(size_t i = 0; i < vmaAllocations.size(); ++i)
|
| 1701 | {
|
| 1702 | if(allocationsChanged[i])
|
| 1703 | {
|
| 1704 | RecreateAllocationResource(allocations[i]);
|
| 1705 | }
|
| 1706 | }
|
| 1707 |
|
| 1708 | for(size_t i = 0; i < allocations.size(); ++i)
|
| 1709 | ValidateAllocationData(allocations[i]);
|
| 1710 |
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 1711 | //wchar_t fileName[MAX_PATH];
|
| 1712 | //swprintf(fileName, MAX_PATH, L"After_%02u.csv", defragIndex);
|
| 1713 | //SaveAllocatorStatsToFile(fileName);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 1714 | }
|
| 1715 | }
|
| 1716 |
|
| 1717 | // Destroy all remaining allocations.
|
| 1718 | DestroyAllAllocations(allocations);
|
| 1719 | }
|
| 1720 |
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 1721 | static void TestDefragmentationGpu()
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1722 | {
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 1723 | wprintf(L"Test defragmentation GPU\n");
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1724 |
|
| 1725 | std::vector<AllocInfo> allocations;
|
| 1726 |
|
| 1727 | // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1728 | const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
|
| 1729 | const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1730 | const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1731 | const size_t bufCount = (size_t)(totalSize / bufSizeMin);
|
| 1732 | const size_t percentToLeave = 30;
|
| 1733 | const size_t percentNonMovable = 3;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1734 | RandomNumberGenerator rand = { 234522 };
|
| 1735 |
|
| 1736 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1737 |
|
| 1738 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 1739 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1740 | allocCreateInfo.flags = 0;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1741 |
|
| 1742 | // Create all intended buffers.
|
| 1743 | for(size_t i = 0; i < bufCount; ++i)
|
| 1744 | {
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1745 | bufCreateInfo.size = align_up(rand.Generate() % (bufSizeMax - bufSizeMin) + bufSizeMin, 32ull);
|
| 1746 |
|
| 1747 | if(rand.Generate() % 100 < percentNonMovable)
|
| 1748 | {
|
| 1749 | bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
|
| 1750 | VK_BUFFER_USAGE_TRANSFER_DST_BIT |
|
| 1751 | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 1752 | allocCreateInfo.pUserData = (void*)(uintptr_t)2;
|
| 1753 | }
|
| 1754 | else
|
| 1755 | {
|
| 1756 | // Different usage just to see different color in output from VmaDumpVis.
|
| 1757 | bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
|
| 1758 | VK_BUFFER_USAGE_TRANSFER_DST_BIT |
|
| 1759 | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 1760 | // And in JSON dump.
|
| 1761 | allocCreateInfo.pUserData = (void*)(uintptr_t)1;
|
| 1762 | }
|
| 1763 |
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1764 | AllocInfo alloc;
|
| 1765 | alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
|
| 1766 | alloc.m_StartValue = rand.Generate();
|
| 1767 | allocations.push_back(alloc);
|
| 1768 | }
|
| 1769 |
|
| 1770 | // Destroy some percentage of them.
|
| 1771 | {
|
| 1772 | const size_t buffersToDestroy = round_div<size_t>(bufCount * (100 - percentToLeave), 100);
|
| 1773 | for(size_t i = 0; i < buffersToDestroy; ++i)
|
| 1774 | {
|
| 1775 | const size_t index = rand.Generate() % allocations.size();
|
| 1776 | allocations[index].Destroy();
|
| 1777 | allocations.erase(allocations.begin() + index);
|
| 1778 | }
|
| 1779 | }
|
| 1780 |
|
| 1781 | // Fill them with meaningful data.
|
| 1782 | UploadGpuData(allocations.data(), allocations.size());
|
| 1783 |
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1784 | wchar_t fileName[MAX_PATH];
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 1785 | swprintf_s(fileName, L"GPU_defragmentation_A_before.json");
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1786 | SaveAllocatorStatsToFile(fileName);
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1787 |
|
| 1788 | // Defragment using GPU only.
|
| 1789 | {
|
| 1790 | const size_t allocCount = allocations.size();
|
Adam Sawicki | 440307e | 2018-10-18 15:05:19 +0200 | [diff] [blame] | 1791 |
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1792 | std::vector<VmaAllocation> allocationPtrs;
|
| 1793 | std::vector<VkBool32> allocationChanged;
|
| 1794 | std::vector<size_t> allocationOriginalIndex;
|
| 1795 |
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1796 | for(size_t i = 0; i < allocCount; ++i)
|
| 1797 | {
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1798 | VmaAllocationInfo allocInfo = {};
|
| 1799 | vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
|
| 1800 | if((uintptr_t)allocInfo.pUserData == 1) // Movable
|
| 1801 | {
|
| 1802 | allocationPtrs.push_back(allocations[i].m_Allocation);
|
| 1803 | allocationChanged.push_back(VK_FALSE);
|
| 1804 | allocationOriginalIndex.push_back(i);
|
| 1805 | }
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1806 | }
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1807 |
|
| 1808 | const size_t movableAllocCount = allocationPtrs.size();
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1809 |
|
| 1810 | BeginSingleTimeCommands();
|
| 1811 |
|
| 1812 | VmaDefragmentationInfo2 defragInfo = {};
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 1813 | defragInfo.flags = 0;
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1814 | defragInfo.allocationCount = (uint32_t)movableAllocCount;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1815 | defragInfo.pAllocations = allocationPtrs.data();
|
Adam Sawicki | 440307e | 2018-10-18 15:05:19 +0200 | [diff] [blame] | 1816 | defragInfo.pAllocationsChanged = allocationChanged.data();
|
| 1817 | defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1818 | defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
|
| 1819 | defragInfo.commandBuffer = g_hTemporaryCommandBuffer;
|
| 1820 |
|
| 1821 | VmaDefragmentationStats stats = {};
|
| 1822 | VmaDefragmentationContext ctx = VK_NULL_HANDLE;
|
| 1823 | VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
|
| 1824 | TEST(res >= VK_SUCCESS);
|
| 1825 |
|
| 1826 | EndSingleTimeCommands();
|
| 1827 |
|
| 1828 | vmaDefragmentationEnd(g_hAllocator, ctx);
|
| 1829 |
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1830 | for(size_t i = 0; i < movableAllocCount; ++i)
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1831 | {
|
| 1832 | if(allocationChanged[i])
|
| 1833 | {
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1834 | const size_t origAllocIndex = allocationOriginalIndex[i];
|
| 1835 | RecreateAllocationResource(allocations[origAllocIndex]);
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1836 | }
|
| 1837 | }
|
| 1838 |
|
Adam Sawicki | 4d844e2 | 2019-01-24 16:21:05 +0100 | [diff] [blame] | 1839 | // If corruption detection is enabled, GPU defragmentation may not work on
|
| 1840 | // memory types that have this detection active, e.g. on Intel.
|
Adam Sawicki | a1f727c | 2019-01-24 16:25:11 +0100 | [diff] [blame] | 1841 | #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
|
Adam Sawicki | 4d844e2 | 2019-01-24 16:21:05 +0100 | [diff] [blame] | 1842 | TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
|
| 1843 | TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
|
Adam Sawicki | a1f727c | 2019-01-24 16:25:11 +0100 | [diff] [blame] | 1844 | #endif
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1845 | }
|
| 1846 |
|
| 1847 | ValidateGpuData(allocations.data(), allocations.size());
|
| 1848 |
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 1849 | swprintf_s(fileName, L"GPU_defragmentation_B_after.json");
|
Adam Sawicki | c6ede15 | 2018-11-16 17:04:14 +0100 | [diff] [blame] | 1850 | SaveAllocatorStatsToFile(fileName);
|
Adam Sawicki | ff0f7b8 | 2018-10-18 14:44:05 +0200 | [diff] [blame] | 1851 |
|
| 1852 | // Destroy all remaining buffers.
|
| 1853 | for(size_t i = allocations.size(); i--; )
|
| 1854 | {
|
| 1855 | allocations[i].Destroy();
|
| 1856 | }
|
| 1857 | }
|
| 1858 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 1859 | static void ProcessDefragmentationStepInfo(VmaDefragmentationPassInfo &stepInfo)
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 1860 | {
|
| 1861 | std::vector<VkImageMemoryBarrier> beginImageBarriers;
|
| 1862 | std::vector<VkImageMemoryBarrier> finalizeImageBarriers;
|
| 1863 |
|
| 1864 | VkPipelineStageFlags beginSrcStageMask = 0;
|
| 1865 | VkPipelineStageFlags beginDstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
|
| 1866 |
|
| 1867 | VkPipelineStageFlags finalizeSrcStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
|
| 1868 | VkPipelineStageFlags finalizeDstStageMask = 0;
|
| 1869 |
|
| 1870 | bool wantsMemoryBarrier = false;
|
| 1871 |
|
| 1872 | VkMemoryBarrier beginMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
|
| 1873 | VkMemoryBarrier finalizeMemoryBarrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
|
| 1874 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 1875 | for(uint32_t i = 0; i < stepInfo.moveCount; ++i)
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 1876 | {
|
| 1877 | VmaAllocationInfo info;
|
| 1878 | vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);
|
| 1879 |
|
| 1880 | AllocInfo *allocInfo = (AllocInfo *)info.pUserData;
|
| 1881 |
|
| 1882 | if(allocInfo->m_Image)
|
| 1883 | {
|
| 1884 | VkImage newImage;
|
| 1885 |
|
| 1886 | const VkResult result = vkCreateImage(g_hDevice, &allocInfo->m_ImageInfo, g_Allocs, &newImage);
|
| 1887 | TEST(result >= VK_SUCCESS);
|
| 1888 |
|
| 1889 | vkBindImageMemory(g_hDevice, newImage, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 1890 | allocInfo->m_NewImage = newImage;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 1891 |
|
| 1892 | // Keep track of our pipeline stages that we need to wait/signal on
|
| 1893 | beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
|
| 1894 | finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
|
| 1895 |
|
| 1896 | // We need one pipeline barrier and two image layout transitions here
|
| 1897 | // First we'll have to turn our newly created image into VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
|
| 1898 | // And the second one is turning the old image into VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
|
| 1899 |
|
| 1900 | VkImageSubresourceRange subresourceRange = {
|
| 1901 | VK_IMAGE_ASPECT_COLOR_BIT,
|
| 1902 | 0, VK_REMAINING_MIP_LEVELS,
|
| 1903 | 0, VK_REMAINING_ARRAY_LAYERS
|
| 1904 | };
|
| 1905 |
|
| 1906 | VkImageMemoryBarrier barrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
|
| 1907 | barrier.srcAccessMask = 0;
|
| 1908 | barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
|
| 1909 | barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 1910 | barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
|
| 1911 | barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
| 1912 | barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
|
| 1913 | barrier.image = newImage;
|
| 1914 | barrier.subresourceRange = subresourceRange;
|
| 1915 |
|
| 1916 | beginImageBarriers.push_back(barrier);
|
| 1917 |
|
| 1918 | // Second barrier to convert the existing image. This one actually needs a real barrier
|
| 1919 | barrier.srcAccessMask = VK_ACCESS_MEMORY_WRITE_BIT;
|
| 1920 | barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
|
| 1921 | barrier.oldLayout = allocInfo->m_ImageLayout;
|
| 1922 | barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
|
| 1923 | barrier.image = allocInfo->m_Image;
|
| 1924 |
|
| 1925 | beginImageBarriers.push_back(barrier);
|
| 1926 |
|
| 1927 | // And lastly we need a barrier that turns our new image into the layout of the old one
|
| 1928 | barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
|
| 1929 | barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
|
| 1930 | barrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
|
| 1931 | barrier.newLayout = allocInfo->m_ImageLayout;
|
| 1932 | barrier.image = newImage;
|
| 1933 |
|
| 1934 | finalizeImageBarriers.push_back(barrier);
|
| 1935 | }
|
| 1936 | else if(allocInfo->m_Buffer)
|
| 1937 | {
|
| 1938 | VkBuffer newBuffer;
|
| 1939 |
|
| 1940 | const VkResult result = vkCreateBuffer(g_hDevice, &allocInfo->m_BufferInfo, g_Allocs, &newBuffer);
|
| 1941 | TEST(result >= VK_SUCCESS);
|
| 1942 |
|
| 1943 | vkBindBufferMemory(g_hDevice, newBuffer, stepInfo.pMoves[i].memory, stepInfo.pMoves[i].offset);
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 1944 | allocInfo->m_NewBuffer = newBuffer;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 1945 |
|
| 1946 | // Keep track of our pipeline stages that we need to wait/signal on
|
| 1947 | beginSrcStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
|
| 1948 | finalizeDstStageMask |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
|
| 1949 |
|
| 1950 | beginMemoryBarrier.srcAccessMask |= VK_ACCESS_MEMORY_WRITE_BIT;
|
| 1951 | beginMemoryBarrier.dstAccessMask |= VK_ACCESS_TRANSFER_READ_BIT;
|
| 1952 |
|
| 1953 | finalizeMemoryBarrier.srcAccessMask |= VK_ACCESS_TRANSFER_WRITE_BIT;
|
| 1954 | finalizeMemoryBarrier.dstAccessMask |= VK_ACCESS_MEMORY_READ_BIT;
|
| 1955 |
|
| 1956 | wantsMemoryBarrier = true;
|
| 1957 | }
|
| 1958 | }
|
| 1959 |
|
| 1960 | if(!beginImageBarriers.empty() || wantsMemoryBarrier)
|
| 1961 | {
|
| 1962 | const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;
|
| 1963 |
|
| 1964 | vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, beginSrcStageMask, beginDstStageMask, 0,
|
| 1965 | memoryBarrierCount, &beginMemoryBarrier,
|
| 1966 | 0, nullptr,
|
| 1967 | (uint32_t)beginImageBarriers.size(), beginImageBarriers.data());
|
| 1968 | }
|
| 1969 |
|
| 1970 | for(uint32_t i = 0; i < stepInfo.moveCount; ++ i)
|
| 1971 | {
|
| 1972 | VmaAllocationInfo info;
|
| 1973 | vmaGetAllocationInfo(g_hAllocator, stepInfo.pMoves[i].allocation, &info);
|
| 1974 |
|
| 1975 | AllocInfo *allocInfo = (AllocInfo *)info.pUserData;
|
| 1976 |
|
| 1977 | if(allocInfo->m_Image)
|
| 1978 | {
|
| 1979 | std::vector<VkImageCopy> imageCopies;
|
| 1980 |
|
| 1981 | // Copy all mips of the source image into the target image
|
| 1982 | VkOffset3D offset = { 0, 0, 0 };
|
| 1983 | VkExtent3D extent = allocInfo->m_ImageInfo.extent;
|
| 1984 |
|
| 1985 | VkImageSubresourceLayers subresourceLayers = {
|
| 1986 | VK_IMAGE_ASPECT_COLOR_BIT,
|
| 1987 | 0,
|
| 1988 | 0, 1
|
| 1989 | };
|
| 1990 |
|
| 1991 | for(uint32_t mip = 0; mip < allocInfo->m_ImageInfo.mipLevels; ++ mip)
|
| 1992 | {
|
| 1993 | subresourceLayers.mipLevel = mip;
|
| 1994 |
|
| 1995 | VkImageCopy imageCopy{
|
| 1996 | subresourceLayers,
|
| 1997 | offset,
|
| 1998 | subresourceLayers,
|
| 1999 | offset,
|
| 2000 | extent
|
| 2001 | };
|
| 2002 |
|
| 2003 | imageCopies.push_back(imageCopy);
|
| 2004 |
|
| 2005 | extent.width = std::max(uint32_t(1), extent.width >> 1);
|
| 2006 | extent.height = std::max(uint32_t(1), extent.height >> 1);
|
| 2007 | extent.depth = std::max(uint32_t(1), extent.depth >> 1);
|
| 2008 | }
|
| 2009 |
|
| 2010 | vkCmdCopyImage(
|
| 2011 | g_hTemporaryCommandBuffer,
|
| 2012 | allocInfo->m_Image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2013 | allocInfo->m_NewImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2014 | (uint32_t)imageCopies.size(), imageCopies.data());
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2015 | }
|
| 2016 | else if(allocInfo->m_Buffer)
|
| 2017 | {
|
| 2018 | VkBufferCopy region = {
|
| 2019 | 0,
|
| 2020 | 0,
|
| 2021 | allocInfo->m_BufferInfo.size };
|
| 2022 |
|
| 2023 | vkCmdCopyBuffer(g_hTemporaryCommandBuffer,
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2024 | allocInfo->m_Buffer, allocInfo->m_NewBuffer,
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2025 | 1, &region);
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2026 | }
|
| 2027 | }
|
| 2028 |
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2029 | if(!finalizeImageBarriers.empty() || wantsMemoryBarrier)
|
| 2030 | {
|
| 2031 | const uint32_t memoryBarrierCount = wantsMemoryBarrier ? 1 : 0;
|
| 2032 |
|
| 2033 | vkCmdPipelineBarrier(g_hTemporaryCommandBuffer, finalizeSrcStageMask, finalizeDstStageMask, 0,
|
| 2034 | memoryBarrierCount, &finalizeMemoryBarrier,
|
| 2035 | 0, nullptr,
|
| 2036 | (uint32_t)finalizeImageBarriers.size(), finalizeImageBarriers.data());
|
| 2037 | }
|
| 2038 | }
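// How the function above is driven, condensed into a sketch for reference (the
// full, authoritative versions are the TestDefragmentationIncremental* tests
// below; setup and error handling are omitted here):
//
//   VmaDefragmentationContext ctx = VK_NULL_HANDLE;
//   vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
//   VkResult res = VK_NOT_READY;
//   while(res == VK_NOT_READY)
//   {
//       VmaDefragmentationPassInfo passInfo = {};
//       passInfo.moveCount = (uint32_t)moves.size();
//       passInfo.pMoves = moves.data();
//       res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &passInfo);
//
//       BeginSingleTimeCommands();
//       ProcessDefragmentationStepInfo(passInfo); // record copies + barriers
//       EndSingleTimeCommands();
//
//       res = vmaEndDefragmentationPass(g_hAllocator, ctx);
//       // Destroy the old buffers/images and adopt the new handles here.
//   }
//   vmaDefragmentationEnd(g_hAllocator, ctx);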
|
| 2039 |
|
| 2040 |
|
| 2041 | static void TestDefragmentationIncrementalBasic()
|
| 2042 | {
|
| 2043 | wprintf(L"Test defragmentation incremental basic\n");
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2044 |
|
| 2045 | std::vector<AllocInfo> allocations;
|
| 2046 |
|
| 2047 | // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
|
| 2048 | const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
|
| 2049 | const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
|
| 2050 | const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
|
| 2051 | const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2052 | const size_t imageCount = totalSize / ((size_t)imageSizes[0] * imageSizes[0] * 4) / 2;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2053 | const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
|
| 2054 | const size_t percentToLeave = 30;
|
| 2055 | RandomNumberGenerator rand = { 234522 };
|
| 2056 |
|
| 2057 | VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 2058 | imageInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 2059 | imageInfo.extent.depth = 1;
|
| 2060 | imageInfo.mipLevels = 1;
|
| 2061 | imageInfo.arrayLayers = 1;
|
| 2062 | imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 2063 | imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 2064 | imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
|
| 2065 | imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
|
| 2066 | imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 2067 |
|
| 2068 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2069 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 2070 | allocCreateInfo.flags = 0;
|
| 2071 |
|
| 2072 | // Create all intended images.
|
| 2073 | for(size_t i = 0; i < imageCount; ++i)
|
| 2074 | {
|
| 2075 | const uint32_t size = imageSizes[rand.Generate() % 3];
|
| 2076 |
|
| 2077 | imageInfo.extent.width = size;
|
| 2078 | imageInfo.extent.height = size;
|
| 2079 |
|
| 2080 | AllocInfo alloc;
|
| 2081 | alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
|
| 2082 | alloc.m_StartValue = 0;
|
| 2083 |
|
| 2084 | allocations.push_back(alloc);
|
| 2085 | }
|
| 2086 |
|
| 2087 | // And all buffers
|
| 2088 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2089 |
|
| 2090 | for(size_t i = 0; i < bufCount; ++i)
|
| 2091 | {
|
| 2092 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
| 2093 | bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 2094 |
|
| 2095 | AllocInfo alloc;
|
| 2096 | alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
|
| 2097 | alloc.m_StartValue = 0;
|
| 2098 |
|
| 2099 | allocations.push_back(alloc);
|
| 2100 | }
|
| 2101 |
|
| 2102 | // Destroy some percentage of them.
|
| 2103 | {
|
| 2104 | const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
|
| 2105 | for(size_t i = 0; i < allocationsToDestroy; ++i)
|
| 2106 | {
|
| 2107 | const size_t index = rand.Generate() % allocations.size();
|
| 2108 | allocations[index].Destroy();
|
| 2109 | allocations.erase(allocations.begin() + index);
|
| 2110 | }
|
| 2111 | }
|
| 2112 |
|
| 2113 | {
|
| 2114 | // Set our user data pointers. A real application should probably be more clever here
|
| 2115 | const size_t allocationCount = allocations.size();
|
| 2116 | for(size_t i = 0; i < allocationCount; ++i)
|
| 2117 | {
|
| 2118 | AllocInfo &alloc = allocations[i];
|
| 2119 | vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
|
| 2120 | }
|
| 2121 | }
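// Note on the pointers stored above: they remain valid only because the
// allocations vector is not resized for the rest of this test. A real
// application would more likely store a stable handle of its own, e.g.
// (purely illustrative, PackResourceHandle() is hypothetical):
//
//   vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation,
//       (void*)(uintptr_t)PackResourceHandle(poolIndex, slotIndex));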
|
| 2122 |
|
| 2123 | // Fill them with meaningful data.
|
| 2124 | UploadGpuData(allocations.data(), allocations.size());
|
| 2125 |
|
| 2126 | wchar_t fileName[MAX_PATH];
|
| 2127 | swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_A_before.json");
|
| 2128 | SaveAllocatorStatsToFile(fileName);
|
| 2129 |
|
| 2130 | // Defragment using GPU only.
|
| 2131 | {
|
| 2132 | const size_t allocCount = allocations.size();
|
| 2133 |
|
| 2134 | std::vector<VmaAllocation> allocationPtrs;
|
| 2135 |
|
| 2136 | for(size_t i = 0; i < allocCount; ++i)
|
| 2137 | {
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2138 | allocationPtrs.push_back(allocations[i].m_Allocation);
|
| 2139 | }
|
| 2140 |
|
| 2141 | const size_t movableAllocCount = allocationPtrs.size();
|
| 2142 |
|
| 2143 | VmaDefragmentationInfo2 defragInfo = {};
|
| 2144 | defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
|
| 2145 | defragInfo.allocationCount = (uint32_t)movableAllocCount;
|
| 2146 | defragInfo.pAllocations = allocationPtrs.data();
|
| 2147 | defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
|
| 2148 | defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
|
| 2149 |
|
| 2150 | VmaDefragmentationStats stats = {};
|
| 2151 | VmaDefragmentationContext ctx = VK_NULL_HANDLE;
|
| 2152 | VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
|
| 2153 | TEST(res >= VK_SUCCESS);
|
| 2154 |
|
| 2155 | res = VK_NOT_READY;
|
| 2156 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2157 | std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2158 | moveInfo.resize(movableAllocCount);
|
| 2159 |
|
| 2160 | while(res == VK_NOT_READY)
|
| 2161 | {
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2162 | VmaDefragmentationPassInfo stepInfo = {};
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2163 | stepInfo.pMoves = moveInfo.data();
|
| 2164 | stepInfo.moveCount = (uint32_t)moveInfo.size();
|
| 2165 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2166 | res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2167 | TEST(res >= VK_SUCCESS);
|
| 2168 |
|
| 2169 | BeginSingleTimeCommands();
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2170 | std::vector<void*> newHandles;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2171 | ProcessDefragmentationStepInfo(stepInfo);
|
| 2172 | EndSingleTimeCommands();
|
| 2173 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2174 | res = vmaEndDefragmentationPass(g_hAllocator, ctx);
|
| 2175 |
|
| 2176 | // Destroy old buffers/images and replace them with new handles.
|
| 2177 | for(size_t i = 0; i < stepInfo.moveCount; ++i)
|
| 2178 | {
|
| 2179 | VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
|
| 2180 | VmaAllocationInfo vmaAllocInfo;
|
| 2181 | vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
|
| 2182 | AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
|
| 2183 | if(allocInfo->m_Buffer)
|
| 2184 | {
|
| 2185 | assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
|
| 2186 | vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
|
| 2187 | allocInfo->m_Buffer = allocInfo->m_NewBuffer;
|
| 2188 | allocInfo->m_NewBuffer = VK_NULL_HANDLE;
|
| 2189 | }
|
| 2190 | else if(allocInfo->m_Image)
|
| 2191 | {
|
| 2192 | assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
|
| 2193 | vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
|
| 2194 | allocInfo->m_Image = allocInfo->m_NewImage;
|
| 2195 | allocInfo->m_NewImage = VK_NULL_HANDLE;
|
| 2196 | }
|
| 2197 | else
|
| 2198 | assert(0);
|
| 2199 | }
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2200 | }
|
| 2201 |
|
| 2202 | TEST(res >= VK_SUCCESS);
|
| 2203 | vmaDefragmentationEnd(g_hAllocator, ctx);
|
| 2204 |
|
| 2205 | // If corruption detection is enabled, GPU defragmentation may not work on
|
| 2206 | // memory types that have this detection active, e.g. on Intel.
|
| 2207 | #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
|
| 2208 | TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
|
| 2209 | TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
|
| 2210 | #endif
|
| 2211 | }
|
| 2212 |
|
| 2213 | //ValidateGpuData(allocations.data(), allocations.size());
|
| 2214 |
|
| 2215 | swprintf_s(fileName, L"GPU_defragmentation_incremental_basic_B_after.json");
|
| 2216 | SaveAllocatorStatsToFile(fileName);
|
| 2217 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2218 | // Destroy all remaining buffers and images.
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2219 | for(size_t i = allocations.size(); i--; )
|
| 2220 | {
|
| 2221 | allocations[i].Destroy();
|
| 2222 | }
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2223 | }
|
| 2224 |
|
| 2225 | void TestDefragmentationIncrementalComplex()
|
| 2226 | {
|
| 2227 | wprintf(L"Test defragmentation incremental complex\n");
|
Adam Sawicki | db4c163 | 2020-07-16 16:41:53 +0200 | [diff] [blame] | 2228 |
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2229 | std::vector<AllocInfo> allocations;
|
| 2230 |
|
| 2231 | // Create enough allocations to be sure to fill 3 new blocks of 256 MB.
|
| 2232 | const std::array<uint32_t, 3> imageSizes = { 256, 512, 1024 };
|
| 2233 | const VkDeviceSize bufSizeMin = 5ull * 1024 * 1024;
|
| 2234 | const VkDeviceSize bufSizeMax = 10ull * 1024 * 1024;
|
| 2235 | const VkDeviceSize totalSize = 3ull * 256 * 1024 * 1024;
|
| 2236 | const size_t imageCount = (size_t)(totalSize / (imageSizes[0] * imageSizes[0] * 4)) / 2;
|
| 2237 | const size_t bufCount = (size_t)(totalSize / bufSizeMin) / 2;
|
| 2238 | const size_t percentToLeave = 30;
|
| 2239 | RandomNumberGenerator rand = { 234522 };
|
| 2240 |
|
| 2241 | VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 2242 | imageInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 2243 | imageInfo.extent.depth = 1;
|
| 2244 | imageInfo.mipLevels = 1;
|
| 2245 | imageInfo.arrayLayers = 1;
|
| 2246 | imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 2247 | imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 2248 | imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
|
| 2249 | imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
|
| 2250 | imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 2251 |
|
| 2252 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2253 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 2254 | allocCreateInfo.flags = 0;
|
| 2255 |
|
| 2256 | // Create all intended images.
|
| 2257 | for(size_t i = 0; i < imageCount; ++i)
|
| 2258 | {
|
| 2259 | const uint32_t size = imageSizes[rand.Generate() % 3];
|
| 2260 |
|
| 2261 | imageInfo.extent.width = size;
|
| 2262 | imageInfo.extent.height = size;
|
| 2263 |
|
| 2264 | AllocInfo alloc;
|
| 2265 | alloc.CreateImage(imageInfo, allocCreateInfo, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
|
| 2266 | alloc.m_StartValue = 0;
|
| 2267 |
|
| 2268 | allocations.push_back(alloc);
|
| 2269 | }
|
| 2270 |
|
| 2271 | // And all buffers
|
| 2272 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2273 |
|
| 2274 | for(size_t i = 0; i < bufCount; ++i)
|
| 2275 | {
|
| 2276 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
| 2277 | bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 2278 |
|
| 2279 | AllocInfo alloc;
|
| 2280 | alloc.CreateBuffer(bufCreateInfo, allocCreateInfo);
|
| 2281 | alloc.m_StartValue = 0;
|
| 2282 |
|
| 2283 | allocations.push_back(alloc);
|
| 2284 | }
|
| 2285 |
|
| 2286 | // Destroy some percentage of them.
|
| 2287 | {
|
| 2288 | const size_t allocationsToDestroy = round_div<size_t>((imageCount + bufCount) * (100 - percentToLeave), 100);
|
| 2289 | for(size_t i = 0; i < allocationsToDestroy; ++i)
|
| 2290 | {
|
| 2291 | const size_t index = rand.Generate() % allocations.size();
|
| 2292 | allocations[index].Destroy();
|
| 2293 | allocations.erase(allocations.begin() + index);
|
| 2294 | }
|
| 2295 | }
|
| 2296 |
|
| 2297 | {
|
| 2298 | // Set our user data pointers. A real application should probably be more clever here
|
| 2299 | const size_t allocationCount = allocations.size();
|
| 2300 | for(size_t i = 0; i < allocationCount; ++i)
|
| 2301 | {
|
| 2302 | AllocInfo &alloc = allocations[i];
|
| 2303 | vmaSetAllocationUserData(g_hAllocator, alloc.m_Allocation, &alloc);
|
| 2304 | }
|
| 2305 | }
|
| 2306 |
|
| 2307 | // Fill them with meaningful data.
|
| 2308 | UploadGpuData(allocations.data(), allocations.size());
|
| 2309 |
|
| 2310 | wchar_t fileName[MAX_PATH];
|
| 2311 | swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_A_before.json");
|
| 2312 | SaveAllocatorStatsToFile(fileName);
|
| 2313 |
|
| 2314 | std::vector<AllocInfo> additionalAllocations;
|
| 2315 |
|
| 2316 | #define MakeAdditionalAllocation() \
|
| 2317 | do { \
|
| 2318 | { \
|
| 2319 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16); \
|
| 2320 | bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; \
|
| 2321 | \
|
| 2322 | AllocInfo alloc; \
|
| 2323 | alloc.CreateBuffer(bufCreateInfo, allocCreateInfo); \
|
| 2324 | \
|
| 2325 | additionalAllocations.push_back(alloc); \
|
| 2326 | } \
|
| 2327 | } while(0)
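// The do { ... } while(0) wrapper above is the usual way to make a
// multi-statement macro expand to a single statement, so that
// MakeAdditionalAllocation(); can sit safely inside if/else branches.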
|
| 2328 |
|
| 2329 | // Defragment using GPU only.
|
| 2330 | {
|
| 2331 | const size_t allocCount = allocations.size();
|
| 2332 |
|
| 2333 | std::vector<VmaAllocation> allocationPtrs;
|
| 2334 |
|
| 2335 | for(size_t i = 0; i < allocCount; ++i)
|
| 2336 | {
|
| 2337 | VmaAllocationInfo allocInfo = {};
|
| 2338 | vmaGetAllocationInfo(g_hAllocator, allocations[i].m_Allocation, &allocInfo);
|
| 2339 |
|
| 2340 | allocationPtrs.push_back(allocations[i].m_Allocation);
|
| 2341 | }
|
| 2342 |
|
| 2343 | const size_t movableAllocCount = allocationPtrs.size();
|
| 2344 |
|
| 2345 | VmaDefragmentationInfo2 defragInfo = {};
|
| 2346 | defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
|
| 2347 | defragInfo.allocationCount = (uint32_t)movableAllocCount;
|
| 2348 | defragInfo.pAllocations = allocationPtrs.data();
|
| 2349 | defragInfo.maxGpuBytesToMove = VK_WHOLE_SIZE;
|
| 2350 | defragInfo.maxGpuAllocationsToMove = UINT32_MAX;
|
| 2351 |
|
| 2352 | VmaDefragmentationStats stats = {};
|
| 2353 | VmaDefragmentationContext ctx = VK_NULL_HANDLE;
|
| 2354 | VkResult res = vmaDefragmentationBegin(g_hAllocator, &defragInfo, &stats, &ctx);
|
| 2355 | TEST(res >= VK_SUCCESS);
|
| 2356 |
|
| 2357 | res = VK_NOT_READY;
|
| 2358 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2359 | std::vector<VmaDefragmentationPassMoveInfo> moveInfo;
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2360 | moveInfo.resize(movableAllocCount);
|
| 2361 |
|
| 2362 | MakeAdditionalAllocation();
|
| 2363 |
|
| 2364 | while(res == VK_NOT_READY)
|
| 2365 | {
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2366 | VmaDefragmentationPassInfo stepInfo = {};
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2367 | stepInfo.pMoves = moveInfo.data();
|
| 2368 | stepInfo.moveCount = (uint32_t)moveInfo.size();
|
| 2369 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2370 | res = vmaBeginDefragmentationPass(g_hAllocator, ctx, &stepInfo);
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2371 | TEST(res >= VK_SUCCESS);
|
| 2372 |
|
| 2373 | MakeAdditionalAllocation();
|
| 2374 |
|
| 2375 | BeginSingleTimeCommands();
|
| 2376 | ProcessDefragmentationStepInfo(stepInfo);
|
| 2377 | EndSingleTimeCommands();
|
| 2378 |
|
Adam Sawicki | c467e28 | 2019-12-23 16:38:31 +0100 | [diff] [blame] | 2379 | res = vmaEndDefragmentationPass(g_hAllocator, ctx);
|
| 2380 |
|
| 2381 | // Destroy old buffers/images and replace them with new handles.
|
| 2382 | for(size_t i = 0; i < stepInfo.moveCount; ++i)
|
| 2383 | {
|
| 2384 | VmaAllocation const alloc = stepInfo.pMoves[i].allocation;
|
| 2385 | VmaAllocationInfo vmaAllocInfo;
|
| 2386 | vmaGetAllocationInfo(g_hAllocator, alloc, &vmaAllocInfo);
|
| 2387 | AllocInfo* allocInfo = (AllocInfo*)vmaAllocInfo.pUserData;
|
| 2388 | if(allocInfo->m_Buffer)
|
| 2389 | {
|
| 2390 | assert(allocInfo->m_NewBuffer && !allocInfo->m_Image && !allocInfo->m_NewImage);
|
| 2391 | vkDestroyBuffer(g_hDevice, allocInfo->m_Buffer, g_Allocs);
|
| 2392 | allocInfo->m_Buffer = allocInfo->m_NewBuffer;
|
| 2393 | allocInfo->m_NewBuffer = VK_NULL_HANDLE;
|
| 2394 | }
|
| 2395 | else if(allocInfo->m_Image)
|
| 2396 | {
|
| 2397 | assert(allocInfo->m_NewImage && !allocInfo->m_Buffer && !allocInfo->m_NewBuffer);
|
| 2398 | vkDestroyImage(g_hDevice, allocInfo->m_Image, g_Allocs);
|
| 2399 | allocInfo->m_Image = allocInfo->m_NewImage;
|
| 2400 | allocInfo->m_NewImage = VK_NULL_HANDLE;
|
| 2401 | }
|
| 2402 | else
|
| 2403 | assert(0);
|
| 2404 | }
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2405 |
|
| 2406 | MakeAdditionalAllocation();
|
| 2407 | }
|
| 2408 |
|
| 2409 | TEST(res >= VK_SUCCESS);
|
| 2410 | vmaDefragmentationEnd(g_hAllocator, ctx);
|
| 2411 |
|
| 2412 | // If corruption detection is enabled, GPU defragmentation may not work on
|
| 2413 | // memory types that have this detection active, e.g. on Intel.
|
| 2414 | #if !defined(VMA_DEBUG_DETECT_CORRUPTION) || VMA_DEBUG_DETECT_CORRUPTION == 0
|
| 2415 | TEST(stats.allocationsMoved > 0 && stats.bytesMoved > 0);
|
| 2416 | TEST(stats.deviceMemoryBlocksFreed > 0 && stats.bytesFreed > 0);
|
| 2417 | #endif
|
| 2418 | }
|
| 2419 |
|
| 2420 | //ValidateGpuData(allocations.data(), allocations.size());
|
| 2421 |
|
| 2422 | swprintf_s(fileName, L"GPU_defragmentation_incremental_complex_B_after.json");
|
| 2423 | SaveAllocatorStatsToFile(fileName);
|
| 2424 |
|
| 2425 | // Destroy all remaining buffers and images.
|
| 2426 | for(size_t i = allocations.size(); i--; )
|
| 2427 | {
|
| 2428 | allocations[i].Destroy();
|
| 2429 | }
|
| 2430 |
|
| 2431 | for(size_t i = additionalAllocations.size(); i--; )
|
| 2432 | {
|
| 2433 | additionalAllocations[i].Destroy();
|
| 2434 | }
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 2435 | }
|
| 2436 |
|
| 2437 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2438 | static void TestUserData()
|
| 2439 | {
|
| 2440 | VkResult res;
|
| 2441 |
|
| 2442 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2443 | bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
|
| 2444 | bufCreateInfo.size = 0x10000;
|
| 2445 |
|
| 2446 | for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
|
| 2447 | {
|
| 2448 | // Opaque pointer
|
| 2449 | {
|
| 2450 |
|
| 2451 | void* numberAsPointer = (void*)(size_t)0xC2501FF3u;
|
| 2452 | void* pointerToSomething = &res;
|
| 2453 |
|
| 2454 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2455 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2456 | allocCreateInfo.pUserData = numberAsPointer;
|
| 2457 | if(testIndex == 1)
|
| 2458 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 2459 |
|
| 2460 | VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
|
| 2461 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2462 | TEST(res == VK_SUCCESS);
|
| 2463 | TEST(allocInfo.pUserData == numberAsPointer);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2464 |
|
| 2465 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2466 | TEST(allocInfo.pUserData == numberAsPointer);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2467 |
|
| 2468 | vmaSetAllocationUserData(g_hAllocator, alloc, pointerToSomething);
|
| 2469 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2470 | TEST(allocInfo.pUserData == pointerToSomething);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2471 |
|
| 2472 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2473 | }
|
| 2474 |
|
| 2475 | // String
|
| 2476 | {
|
| 2477 | const char* name1 = "Buffer name \\\"\'<>&% \nSecond line .,;=";
|
| 2478 | const char* name2 = "2";
|
| 2479 | const size_t name1Len = strlen(name1);
|
| 2480 |
|
| 2481 | char* name1Buf = new char[name1Len + 1];
|
| 2482 | strcpy_s(name1Buf, name1Len + 1, name1);
|
| 2483 |
|
| 2484 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2485 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2486 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT;
|
| 2487 | allocCreateInfo.pUserData = name1Buf;
|
| 2488 | if(testIndex == 1)
|
| 2489 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 2490 |
|
| 2491 | VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
|
| 2492 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2493 | TEST(res == VK_SUCCESS);
|
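| | // With USER_DATA_COPY_STRING the allocator is expected to make its own copy of the
|
| | // string, so the returned pointer should differ from the caller's buffer.
|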
| 2494 | TEST(allocInfo.pUserData != nullptr && allocInfo.pUserData != name1Buf);
|
| 2495 | TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2496 |
|
| 2497 | delete[] name1Buf;
|
| 2498 |
|
| 2499 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2500 | TEST(strcmp(name1, (const char*)allocInfo.pUserData) == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2501 |
|
| 2502 | vmaSetAllocationUserData(g_hAllocator, alloc, (void*)name2);
|
| 2503 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2504 | TEST(strcmp(name2, (const char*)allocInfo.pUserData) == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2505 |
|
| 2506 | vmaSetAllocationUserData(g_hAllocator, alloc, nullptr);
|
| 2507 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2508 | TEST(allocInfo.pUserData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2509 |
|
| 2510 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2511 | }
|
| 2512 | }
|
| 2513 | }
|
| 2514 |
|
Adam Sawicki | 370ab18 | 2018-11-08 16:31:00 +0100 | [diff] [blame] | 2515 | static void TestInvalidAllocations()
|
| 2516 | {
|
| 2517 | VkResult res;
|
| 2518 |
|
| 2519 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2520 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2521 |
|
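| | // Each of the invalid requests below is expected to fail with VK_ERROR_VALIDATION_FAILED_EXT
|
| | // and to leave the output handles set to VK_NULL_HANDLE.
|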
| 2522 | // Try to allocate 0 bytes.
|
| 2523 | {
|
| 2524 | VkMemoryRequirements memReq = {};
|
| 2525 | memReq.size = 0; // !!!
|
| 2526 | memReq.alignment = 4;
|
| 2527 | memReq.memoryTypeBits = UINT32_MAX;
|
| 2528 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 2529 | res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
|
| 2530 | TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && alloc == VK_NULL_HANDLE);
|
| 2531 | }
|
| 2532 |
|
| 2533 | // Try to create buffer with size = 0.
|
| 2534 | {
|
| 2535 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2536 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 2537 | bufCreateInfo.size = 0; // !!!
|
| 2538 | VkBuffer buf = VK_NULL_HANDLE;
|
| 2539 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 2540 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
|
| 2541 | TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && buf == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
|
| 2542 | }
|
| 2543 |
|
| 2544 | // Try to create image with one dimension = 0.
|
| 2545 | {
|
| 2546 | VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 2547 | imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 2548 | imageCreateInfo.format = VK_FORMAT_B8G8R8A8_UNORM;
|
| 2549 | imageCreateInfo.extent.width = 128;
|
| 2550 | imageCreateInfo.extent.height = 0; // !!!
|
| 2551 | imageCreateInfo.extent.depth = 1;
|
| 2552 | imageCreateInfo.mipLevels = 1;
|
| 2553 | imageCreateInfo.arrayLayers = 1;
|
| 2554 | imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 2555 | imageCreateInfo.tiling = VK_IMAGE_TILING_LINEAR;
|
| 2556 | imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
|
| 2557 | imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
|
| 2558 | VkImage image = VK_NULL_HANDLE;
|
| 2559 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 2560 | res = vmaCreateImage(g_hAllocator, &imageCreateInfo, &allocCreateInfo, &image, &alloc, nullptr);
|
| 2561 | TEST(res == VK_ERROR_VALIDATION_FAILED_EXT && image == VK_NULL_HANDLE && alloc == VK_NULL_HANDLE);
|
| 2562 | }
|
| 2563 | }
|
| 2564 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2565 | static void TestMemoryRequirements()
|
| 2566 | {
|
| 2567 | VkResult res;
|
| 2568 | VkBuffer buf;
|
| 2569 | VmaAllocation alloc;
|
| 2570 | VmaAllocationInfo allocInfo;
|
| 2571 |
|
| 2572 | const VkPhysicalDeviceMemoryProperties* memProps;
|
| 2573 | vmaGetMemoryProperties(g_hAllocator, &memProps);
|
| 2574 |
|
| 2575 | VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2576 | bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 2577 | bufInfo.size = 128;
|
| 2578 |
|
| 2579 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2580 |
|
| 2581 | // No requirements.
|
| 2582 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2583 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2584 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2585 |
|
| 2586 | // Usage.
|
| 2587 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2588 | allocCreateInfo.requiredFlags = 0;
|
| 2589 | allocCreateInfo.preferredFlags = 0;
|
| 2590 | allocCreateInfo.memoryTypeBits = UINT32_MAX;
|
| 2591 |
|
| 2592 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2593 | TEST(res == VK_SUCCESS);
|
| 2594 | TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2595 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2596 |
|
| 2597 | // Required flags, preferred flags.
|
| 2598 | allocCreateInfo.usage = VMA_MEMORY_USAGE_UNKNOWN;
|
| 2599 | allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
|
| 2600 | allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
|
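| | // Note: memoryTypeBits == 0 means no restriction here; it should behave the same as UINT32_MAX.
|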
| 2601 | allocCreateInfo.memoryTypeBits = 0;
|
| 2602 |
|
| 2603 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2604 | TEST(res == VK_SUCCESS);
|
| 2605 | TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
|
| 2606 | TEST(memProps->memoryTypes[allocInfo.memoryType].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2607 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2608 |
|
| 2609 | // memoryTypeBits.
|
| 2610 | const uint32_t memType = allocInfo.memoryType;
|
| 2611 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2612 | allocCreateInfo.requiredFlags = 0;
|
| 2613 | allocCreateInfo.preferredFlags = 0;
|
| 2614 | allocCreateInfo.memoryTypeBits = 1u << memType;
|
| 2615 |
|
| 2616 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2617 | TEST(res == VK_SUCCESS);
|
| 2618 | TEST(allocInfo.memoryType == memType);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2619 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2620 |
|
| 2621 | }
|
| 2622 |
|
Adam Sawicki | a1d992f | 2020-03-02 15:32:10 +0100 | [diff] [blame] | 2623 | static void TestGetAllocatorInfo()
|
| 2624 | {
|
| 2625 | wprintf(L"Test vnaGetAllocatorInfo\n");
|
| 2626 |
|
| 2627 | VmaAllocatorInfo allocInfo = {};
|
| 2628 | vmaGetAllocatorInfo(g_hAllocator, &allocInfo);
|
| 2629 | TEST(allocInfo.instance == g_hVulkanInstance);
|
| 2630 | TEST(allocInfo.physicalDevice == g_hPhysicalDevice);
|
| 2631 | TEST(allocInfo.device == g_hDevice);
|
| 2632 | }
|
| 2633 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2634 | static void TestBasics()
|
| 2635 | {
|
Adam Sawicki | aaa1a56 | 2020-06-24 17:41:09 +0200 | [diff] [blame] | 2636 | wprintf(L"Test basics\n");
|
| 2637 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2638 | VkResult res;
|
| 2639 |
|
Adam Sawicki | a1d992f | 2020-03-02 15:32:10 +0100 | [diff] [blame] | 2640 | TestGetAllocatorInfo();
|
| 2641 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2642 | TestMemoryRequirements();
|
| 2643 |
|
| 2644 | // Lost allocation
|
| 2645 | {
|
| 2646 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 2647 | vmaCreateLostAllocation(g_hAllocator, &alloc);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2648 | TEST(alloc != VK_NULL_HANDLE);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2649 |
|
| 2650 | VmaAllocationInfo allocInfo;
|
| 2651 | vmaGetAllocationInfo(g_hAllocator, alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2652 | TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
|
| 2653 | TEST(allocInfo.size == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2654 |
|
| 2655 | vmaFreeMemory(g_hAllocator, alloc);
|
| 2656 | }
|
| 2657 |
|
| 2658 | // Allocation that is MAPPED and not necessarily HOST_VISIBLE.
|
| 2659 | {
|
| 2660 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2661 | bufCreateInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
|
| 2662 | bufCreateInfo.size = 128;
|
| 2663 |
|
| 2664 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2665 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 2666 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 2667 |
|
| 2668 | VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo;
|
| 2669 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2670 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2671 |
|
| 2672 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2673 |
|
| 2674 | // Same with DEDICATED_MEMORY.
|
| 2675 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 2676 |
|
| 2677 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2678 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2679 |
|
| 2680 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 2681 | }
|
| 2682 |
|
| 2683 | TestUserData();
|
Adam Sawicki | 370ab18 | 2018-11-08 16:31:00 +0100 | [diff] [blame] | 2684 |
|
| 2685 | TestInvalidAllocations();
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2686 | }
|
| 2687 |
|
Adam Sawicki | aaa1a56 | 2020-06-24 17:41:09 +0200 | [diff] [blame] | 2688 | static void TestAllocationVersusResourceSize()
|
| 2689 | {
|
| 2690 | wprintf(L"Test allocation versus resource size\n");
|
| 2691 |
|
| 2692 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2693 | bufCreateInfo.size = 22921; // Prime number
|
| 2694 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 2695 |
|
| 2696 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2697 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 2698 |
|
| 2699 | for(uint32_t i = 0; i < 2; ++i)
|
| 2700 | {
|
| 2701 | allocCreateInfo.flags = (i == 1) ? VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT : 0;
|
| 2702 |
|
| 2703 | AllocInfo info;
|
| 2704 | info.CreateBuffer(bufCreateInfo, allocCreateInfo);
|
| 2705 |
|
| 2706 | VmaAllocationInfo allocInfo = {};
|
| 2707 | vmaGetAllocationInfo(g_hAllocator, info.m_Allocation, &allocInfo);
|
| 2708 | //wprintf(L" Buffer size = %llu, allocation size = %llu\n", bufCreateInfo.size, allocInfo.size);
|
| 2709 |
|
| 2710 | // Map and test accessing entire area of the allocation, not only the buffer.
|
| 2711 | void* mappedPtr = nullptr;
|
| 2712 | VkResult res = vmaMapMemory(g_hAllocator, info.m_Allocation, &mappedPtr);
|
| 2713 | TEST(res == VK_SUCCESS);
|
| 2714 |
|
| 2715 | memset(mappedPtr, 0xCC, (size_t)allocInfo.size);
|
| 2716 |
|
| 2717 | vmaUnmapMemory(g_hAllocator, info.m_Allocation);
|
| 2718 |
|
| 2719 | info.Destroy();
|
| 2720 | }
|
| 2721 | }
|
| 2722 |
|
Adam Sawicki | ddcbf8c | 2019-11-22 15:22:42 +0100 | [diff] [blame] | 2723 | static void TestPool_MinBlockCount()
|
| 2724 | {
|
| 2725 | #if defined(VMA_DEBUG_MARGIN) && VMA_DEBUG_MARGIN > 0
|
| 2726 | return;
|
| 2727 | #endif
|
| 2728 |
|
| 2729 | wprintf(L"Test Pool MinBlockCount\n");
|
| 2730 | VkResult res;
|
| 2731 |
|
| 2732 | static const VkDeviceSize ALLOC_SIZE = 512ull * 1024;
|
| 2733 | static const VkDeviceSize BLOCK_SIZE = ALLOC_SIZE * 2; // Each block can fit 2 allocations.
|
| 2734 |
|
| 2735 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2736 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_COPY;
|
| 2737 |
|
| 2738 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2739 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 2740 | bufCreateInfo.size = ALLOC_SIZE;
|
| 2741 |
|
| 2742 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 2743 | poolCreateInfo.blockSize = BLOCK_SIZE;
|
| 2744 | poolCreateInfo.minBlockCount = 2; // At least 2 blocks always present.
|
| 2745 | res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
| 2746 | TEST(res == VK_SUCCESS);
|
| 2747 |
|
| 2748 | VmaPool pool = VK_NULL_HANDLE;
|
| 2749 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
| 2750 | TEST(res == VK_SUCCESS && pool != VK_NULL_HANDLE);
|
| 2751 |
|
| 2752 | // Check that there are 2 blocks preallocated as requested.
|
| 2753 | VmaPoolStats begPoolStats = {};
|
| 2754 | vmaGetPoolStats(g_hAllocator, pool, &begPoolStats);
|
| 2755 | TEST(begPoolStats.blockCount == 2 && begPoolStats.allocationCount == 0 && begPoolStats.size == BLOCK_SIZE * 2);
|
| 2756 |
|
| 2757 | // Allocate 5 buffers to create 3 blocks.
|
| 2758 | static const uint32_t BUF_COUNT = 5;
|
| 2759 | allocCreateInfo.pool = pool;
|
| 2760 | std::vector<AllocInfo> allocs(BUF_COUNT);
|
| 2761 | for(uint32_t i = 0; i < BUF_COUNT; ++i)
|
| 2762 | {
|
| 2763 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &allocs[i].m_Buffer, &allocs[i].m_Allocation, nullptr);
|
| 2764 | TEST(res == VK_SUCCESS && allocs[i].m_Buffer != VK_NULL_HANDLE && allocs[i].m_Allocation != VK_NULL_HANDLE);
|
| 2765 | }
|
| 2766 |
|
| 2767 | // Check that there are really 3 blocks.
|
| 2768 | VmaPoolStats poolStats2 = {};
|
| 2769 | vmaGetPoolStats(g_hAllocator, pool, &poolStats2);
|
| 2770 | TEST(poolStats2.blockCount == 3 && poolStats2.allocationCount == BUF_COUNT && poolStats2.size == BLOCK_SIZE * 3);
|
| 2771 |
|
| 2772 | // Free the first two allocations to make one block empty.
|
| 2773 | allocs[0].Destroy();
|
| 2774 | allocs[1].Destroy();
|
| 2775 |
|
| 2776 | // Check that there are still 3 blocks due to hysteresis.
|
| 2777 | VmaPoolStats poolStats3 = {};
|
| 2778 | vmaGetPoolStats(g_hAllocator, pool, &poolStats3);
|
| 2779 | TEST(poolStats3.blockCount == 3 && poolStats3.allocationCount == BUF_COUNT - 2 && poolStats3.size == BLOCK_SIZE * 3);
|
| 2780 |
|
| 2781 | // Free the last allocation to make the second block empty.
|
| 2782 | allocs[BUF_COUNT - 1].Destroy();
|
| 2783 |
|
| 2784 | // Check that there are now 2 blocks only.
|
| 2785 | VmaPoolStats poolStats4 = {};
|
| 2786 | vmaGetPoolStats(g_hAllocator, pool, &poolStats4);
|
| 2787 | TEST(poolStats4.blockCount == 2 && poolStats4.allocationCount == BUF_COUNT - 3 && poolStats4.size == BLOCK_SIZE * 2);
|
| 2788 |
|
| 2789 | // Cleanup.
|
| 2790 | for(size_t i = allocs.size(); i--; )
|
| 2791 | {
|
| 2792 | allocs[i].Destroy();
|
| 2793 | }
|
| 2794 | vmaDestroyPool(g_hAllocator, pool);
|
| 2795 | }
|
| 2796 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2797 | void TestHeapSizeLimit()
|
| 2798 | {
|
Adam Sawicki | fbaccff | 2020-03-09 17:09:23 +0100 | [diff] [blame] | 2799 | const VkDeviceSize HEAP_SIZE_LIMIT = 100ull * 1024 * 1024; // 100 MB
|
| 2800 | const VkDeviceSize BLOCK_SIZE = 10ull * 1024 * 1024; // 10 MB
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2801 |
|
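| | // Impose the same artificial limit on every heap slot; entries for heaps that do not
|
| | // exist on this device are presumably ignored by the allocator.
|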
| 2802 | VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
|
| 2803 | for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
|
| 2804 | {
|
| 2805 | heapSizeLimit[i] = HEAP_SIZE_LIMIT;
|
| 2806 | }
|
| 2807 |
|
| 2808 | VmaAllocatorCreateInfo allocatorCreateInfo = {};
|
| 2809 | allocatorCreateInfo.physicalDevice = g_hPhysicalDevice;
|
| 2810 | allocatorCreateInfo.device = g_hDevice;
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 2811 | allocatorCreateInfo.instance = g_hVulkanInstance;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2812 | allocatorCreateInfo.pHeapSizeLimit = heapSizeLimit;
|
| 2813 |
|
| 2814 | VmaAllocator hAllocator;
|
| 2815 | VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &hAllocator);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2816 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2817 |
|
| 2818 | struct Item
|
| 2819 | {
|
| 2820 | VkBuffer hBuf;
|
| 2821 | VmaAllocation hAlloc;
|
| 2822 | };
|
| 2823 | std::vector<Item> items;
|
| 2824 |
|
| 2825 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2826 | bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 2827 |
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 2828 | // 1. Allocate two buffers of dedicated memory, each half the size of BLOCK_SIZE.
|
| 2829 | VmaAllocationInfo dedicatedAllocInfo;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2830 | {
|
| 2831 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2832 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 2833 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 2834 |
|
| 2835 | bufCreateInfo.size = BLOCK_SIZE / 2;
|
| 2836 |
|
| 2837 | for(size_t i = 0; i < 2; ++i)
|
| 2838 | {
|
| 2839 | Item item;
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 2840 | res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, &dedicatedAllocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2841 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2842 | items.push_back(item);
|
| 2843 | }
|
| 2844 | }
|
| 2845 |
|
| 2846 | // Create a pool so that subsequent allocations must come from this memory type.
|
| 2847 | VmaPoolCreateInfo poolCreateInfo = {};
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 2848 | poolCreateInfo.memoryTypeIndex = dedicatedAllocInfo.memoryType;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2849 | poolCreateInfo.blockSize = BLOCK_SIZE;
|
| 2850 |
|
| 2851 | VmaPool hPool;
|
| 2852 | res = vmaCreatePool(hAllocator, &poolCreateInfo, &hPool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2853 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2854 |
|
| 2855 | // 2. Allocate normal buffers from all the remaining memory.
|
| 2856 | {
|
| 2857 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2858 | allocCreateInfo.pool = hPool;
|
| 2859 |
|
| 2860 | bufCreateInfo.size = BLOCK_SIZE / 2;
|
| 2861 |
|
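| | // With the constants above this is (100 / 10 - 1) * 2 = 18 buffers of BLOCK_SIZE / 2 each:
|
| | // enough to fill the 9 blocks that remain under the heap limit after the two dedicated
|
| | // allocations (which together take one BLOCK_SIZE worth of memory).
|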
| 2862 | const size_t bufCount = ((HEAP_SIZE_LIMIT / BLOCK_SIZE) - 1) * 2;
|
| 2863 | for(size_t i = 0; i < bufCount; ++i)
|
| 2864 | {
|
| 2865 | Item item;
|
| 2866 | res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &item.hBuf, &item.hAlloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2867 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2868 | items.push_back(item);
|
| 2869 | }
|
| 2870 | }
|
| 2871 |
|
| 2872 | // 3. Allocation of one more (even small) buffer should fail.
|
| 2873 | {
|
| 2874 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2875 | allocCreateInfo.pool = hPool;
|
| 2876 |
|
| 2877 | bufCreateInfo.size = 128;
|
| 2878 |
|
| 2879 | VkBuffer hBuf;
|
| 2880 | VmaAllocation hAlloc;
|
| 2881 | res = vmaCreateBuffer(hAllocator, &bufCreateInfo, &allocCreateInfo, &hBuf, &hAlloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2882 | TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 2883 | }
|
| 2884 |
|
| 2885 | // Destroy everything.
|
| 2886 | for(size_t i = items.size(); i--; )
|
| 2887 | {
|
| 2888 | vmaDestroyBuffer(hAllocator, items[i].hBuf, items[i].hAlloc);
|
| 2889 | }
|
| 2890 |
|
| 2891 | vmaDestroyPool(hAllocator, hPool);
|
| 2892 |
|
| 2893 | vmaDestroyAllocator(hAllocator);
|
| 2894 | }
|
| 2895 |
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2896 | #if VMA_DEBUG_MARGIN
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2897 | static void TestDebugMargin()
|
| 2898 | {
|
| 2899 | if(VMA_DEBUG_MARGIN == 0)
|
| 2900 | {
|
| 2901 | return;
|
| 2902 | }
|
| 2903 |
|
| 2904 | VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2905 | bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2906 |
|
| 2907 | VmaAllocationCreateInfo allocCreateInfo = {};
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2908 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2909 |
|
| 2910 | // Create a few buffers of different sizes.
|
| 2911 | const size_t BUF_COUNT = 10;
|
| 2912 | BufferInfo buffers[BUF_COUNT];
|
| 2913 | VmaAllocationInfo allocInfo[BUF_COUNT];
|
| 2914 | for(size_t i = 0; i < 10; ++i)
|
| 2915 | {
|
| 2916 | bufInfo.size = (VkDeviceSize)(i + 1) * 64;
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2917 | // Last one will be mapped.
|
| 2918 | allocCreateInfo.flags = (i == BUF_COUNT - 1) ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2919 |
|
| 2920 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo, &buffers[i].Buffer, &buffers[i].Allocation, &allocInfo[i]);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2921 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2922 | // Margin is preserved also at the beginning of a block.
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2923 | TEST(allocInfo[i].offset >= VMA_DEBUG_MARGIN);
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2924 |
|
| 2925 | if(i == BUF_COUNT - 1)
|
| 2926 | {
|
| 2927 | // Fill with data.
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2928 | TEST(allocInfo[i].pMappedData != nullptr);
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2929 | // Uncomment this "+ 1" to overwrite past end of allocation and check corruption detection.
|
| 2930 | memset(allocInfo[i].pMappedData, 0xFF, bufInfo.size /* + 1 */);
|
| 2931 | }
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2932 | }
|
| 2933 |
|
| 2934 | // Check if their offsets preserve margin between them.
|
| 2935 | std::sort(allocInfo, allocInfo + BUF_COUNT, [](const VmaAllocationInfo& lhs, const VmaAllocationInfo& rhs) -> bool
|
| 2936 | {
|
| 2937 | if(lhs.deviceMemory != rhs.deviceMemory)
|
| 2938 | {
|
| 2939 | return lhs.deviceMemory < rhs.deviceMemory;
|
| 2940 | }
|
| 2941 | return lhs.offset < rhs.offset;
|
| 2942 | });
|
| 2943 | for(size_t i = 1; i < BUF_COUNT; ++i)
|
| 2944 | {
|
| 2945 | if(allocInfo[i].deviceMemory == allocInfo[i - 1].deviceMemory)
|
| 2946 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2947 | TEST(allocInfo[i].offset >= allocInfo[i - 1].offset + VMA_DEBUG_MARGIN);
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2948 | }
|
| 2949 | }
|
| 2950 |
|
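| | // UINT32_MAX is passed as the memoryTypeBits mask, so all memory types are checked at once.
|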
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2951 | VkResult res = vmaCheckCorruption(g_hAllocator, UINT32_MAX);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2952 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2953 |
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2954 | // Destroy all buffers.
|
| 2955 | for(size_t i = BUF_COUNT; i--; )
|
| 2956 | {
|
| 2957 | vmaDestroyBuffer(g_hAllocator, buffers[i].Buffer, buffers[i].Allocation);
|
| 2958 | }
|
| 2959 | }
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 2960 | #endif
|
Adam Sawicki | 73b1665 | 2018-06-11 16:39:25 +0200 | [diff] [blame] | 2961 |
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 2962 | static void TestLinearAllocator()
|
| 2963 | {
|
| 2964 | wprintf(L"Test linear allocator\n");
|
| 2965 |
|
| 2966 | RandomNumberGenerator rand{645332};
|
| 2967 |
|
| 2968 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 2969 | sampleBufCreateInfo.size = 1024; // Whatever.
|
| 2970 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 2971 |
|
| 2972 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 2973 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 2974 |
|
| 2975 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 2976 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2977 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 2978 |
|
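| | // A single fixed-size block, so the linear (stack/ring) algorithm is exercised within one block.
|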
Adam Sawicki | ee08277 | 2018-06-20 17:45:49 +0200 | [diff] [blame] | 2979 | poolCreateInfo.blockSize = 1024 * 300;
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 2980 | poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
|
| 2981 | poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
|
| 2982 |
|
| 2983 | VmaPool pool = nullptr;
|
| 2984 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 2985 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 2986 |
|
| 2987 | VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
|
| 2988 |
|
| 2989 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 2990 | allocCreateInfo.pool = pool;
|
| 2991 |
|
| 2992 | constexpr size_t maxBufCount = 100;
|
| 2993 | std::vector<BufferInfo> bufInfo;
|
| 2994 |
|
| 2995 | constexpr VkDeviceSize bufSizeMin = 16;
|
| 2996 | constexpr VkDeviceSize bufSizeMax = 1024;
|
| 2997 | VmaAllocationInfo allocInfo;
|
| 2998 | VkDeviceSize prevOffset = 0;
|
| 2999 |
|
| 3000 | // Test one-time free.
|
| 3001 | for(size_t i = 0; i < 2; ++i)
|
| 3002 | {
|
| 3003 | // Allocate a number of buffers of varying size that surely fit into this block.
|
| 3004 | VkDeviceSize bufSumSize = 0;
|
| 3005 | for(size_t i = 0; i < maxBufCount; ++i)
|
| 3006 | {
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3007 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3008 | BufferInfo newBufInfo;
|
| 3009 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3010 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3011 | TEST(res == VK_SUCCESS);
|
| 3012 | TEST(i == 0 || allocInfo.offset > prevOffset);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3013 | bufInfo.push_back(newBufInfo);
|
| 3014 | prevOffset = allocInfo.offset;
|
| 3015 | bufSumSize += bufCreateInfo.size;
|
| 3016 | }
|
| 3017 |
|
| 3018 | // Validate pool stats.
|
| 3019 | VmaPoolStats stats;
|
| 3020 | vmaGetPoolStats(g_hAllocator, pool, &stats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3021 | TEST(stats.size == poolCreateInfo.blockSize);
|
| 3022 | TEST(stats.unusedSize == poolCreateInfo.blockSize - bufSumSize);
|
| 3023 | TEST(stats.allocationCount == bufInfo.size());
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3024 |
|
| 3025 | // Destroy the buffers in random order.
|
| 3026 | while(!bufInfo.empty())
|
| 3027 | {
|
| 3028 | const size_t indexToDestroy = rand.Generate() % bufInfo.size();
|
| 3029 | const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
|
| 3030 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3031 | bufInfo.erase(bufInfo.begin() + indexToDestroy);
|
| 3032 | }
|
| 3033 | }
|
| 3034 |
|
| 3035 | // Test stack.
|
| 3036 | {
|
| 3037 | // Allocate a number of buffers of varying size that surely fit into this block.
|
| 3038 | for(size_t i = 0; i < maxBufCount; ++i)
|
| 3039 | {
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3040 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3041 | BufferInfo newBufInfo;
|
| 3042 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3043 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3044 | TEST(res == VK_SUCCESS);
|
| 3045 | TEST(i == 0 || allocInfo.offset > prevOffset);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3046 | bufInfo.push_back(newBufInfo);
|
| 3047 | prevOffset = allocInfo.offset;
|
| 3048 | }
|
| 3049 |
|
| 3050 | // Destroy a few buffers from the top of the stack.
|
| 3051 | for(size_t i = 0; i < maxBufCount / 5; ++i)
|
| 3052 | {
|
| 3053 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3054 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3055 | bufInfo.pop_back();
|
| 3056 | }
|
| 3057 |
|
| 3058 | // Create some more
|
| 3059 | for(size_t i = 0; i < maxBufCount / 5; ++i)
|
| 3060 | {
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3061 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3062 | BufferInfo newBufInfo;
|
| 3063 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3064 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3065 | TEST(res == VK_SUCCESS);
|
| 3066 | TEST(i == 0 || allocInfo.offset > prevOffset);
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3067 | bufInfo.push_back(newBufInfo);
|
| 3068 | prevOffset = allocInfo.offset;
|
| 3069 | }
|
| 3070 |
|
| 3071 | // Destroy the buffers in reverse order.
|
| 3072 | while(!bufInfo.empty())
|
| 3073 | {
|
| 3074 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3075 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3076 | bufInfo.pop_back();
|
| 3077 | }
|
| 3078 | }
|
| 3079 |
|
Adam Sawicki | ee08277 | 2018-06-20 17:45:49 +0200 | [diff] [blame] | 3080 | // Test ring buffer.
|
| 3081 | {
|
| 3082 | // Allocate a number of buffers that surely fit into this block.
|
| 3083 | bufCreateInfo.size = bufSizeMax;
|
| 3084 | for(size_t i = 0; i < maxBufCount; ++i)
|
| 3085 | {
|
| 3086 | BufferInfo newBufInfo;
|
| 3087 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3088 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3089 | TEST(res == VK_SUCCESS);
|
| 3090 | TEST(i == 0 || allocInfo.offset > prevOffset);
|
Adam Sawicki | ee08277 | 2018-06-20 17:45:49 +0200 | [diff] [blame] | 3091 | bufInfo.push_back(newBufInfo);
|
| 3092 | prevOffset = allocInfo.offset;
|
| 3093 | }
|
| 3094 |
|
| 3095 | // Free and allocate new buffers enough times to make sure we wrap around at least once.
|
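| | // With the values above: about 300 buffers of bufSizeMax fit in one block, freed and
|
| | // reallocated in batches of 9, so iterCount cycles through the whole block roughly twice.
|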
| 3096 | const size_t buffersPerIter = maxBufCount / 10 - 1;
|
| 3097 | const size_t iterCount = poolCreateInfo.blockSize / bufCreateInfo.size / buffersPerIter * 2;
|
| 3098 | for(size_t iter = 0; iter < iterCount; ++iter)
|
| 3099 | {
|
| 3100 | for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
|
| 3101 | {
|
| 3102 | const BufferInfo& currBufInfo = bufInfo.front();
|
| 3103 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3104 | bufInfo.erase(bufInfo.begin());
|
| 3105 | }
|
| 3106 | for(size_t bufPerIter = 0; bufPerIter < buffersPerIter; ++bufPerIter)
|
| 3107 | {
|
| 3108 | BufferInfo newBufInfo;
|
| 3109 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3110 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3111 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | ee08277 | 2018-06-20 17:45:49 +0200 | [diff] [blame] | 3112 | bufInfo.push_back(newBufInfo);
|
| 3113 | }
|
| 3114 | }
|
| 3115 |
|
| 3116 | // Allocate buffers until we reach out-of-memory.
|
| 3117 | uint32_t debugIndex = 0;
|
| 3118 | while(res == VK_SUCCESS)
|
| 3119 | {
|
| 3120 | BufferInfo newBufInfo;
|
| 3121 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3122 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
| 3123 | if(res == VK_SUCCESS)
|
| 3124 | {
|
| 3125 | bufInfo.push_back(newBufInfo);
|
| 3126 | }
|
| 3127 | else
|
| 3128 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3129 | TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
|
Adam Sawicki | ee08277 | 2018-06-20 17:45:49 +0200 | [diff] [blame] | 3130 | }
|
| 3131 | ++debugIndex;
|
| 3132 | }
|
| 3133 |
|
| 3134 | // Destroy the buffers in random order.
|
| 3135 | while(!bufInfo.empty())
|
| 3136 | {
|
| 3137 | const size_t indexToDestroy = rand.Generate() % bufInfo.size();
|
| 3138 | const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
|
| 3139 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3140 | bufInfo.erase(bufInfo.begin() + indexToDestroy);
|
| 3141 | }
|
| 3142 | }
|
| 3143 |
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3144 | // Test double stack.
|
| 3145 | {
|
| 3146 | // Allocate a number of buffers of varying size that surely fit into this block, alternating between bottom and top.
|
| 3147 | VkDeviceSize prevOffsetLower = 0;
|
| 3148 | VkDeviceSize prevOffsetUpper = poolCreateInfo.blockSize;
|
| 3149 | for(size_t i = 0; i < maxBufCount; ++i)
|
| 3150 | {
|
| 3151 | const bool upperAddress = (i % 2) != 0;
|
| 3152 | if(upperAddress)
|
| 3153 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
| 3154 | else
|
| 3155 | allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3156 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3157 | BufferInfo newBufInfo;
|
| 3158 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3159 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3160 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3161 | if(upperAddress)
|
| 3162 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3163 | TEST(allocInfo.offset < prevOffsetUpper);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3164 | prevOffsetUpper = allocInfo.offset;
|
| 3165 | }
|
| 3166 | else
|
| 3167 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3168 | TEST(allocInfo.offset >= prevOffsetLower);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3169 | prevOffsetLower = allocInfo.offset;
|
| 3170 | }
|
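| | // The lower and upper ends of the double stack must never cross.
|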
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3171 | TEST(prevOffsetLower < prevOffsetUpper);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3172 | bufInfo.push_back(newBufInfo);
|
| 3173 | }
|
| 3174 |
|
| 3175 | // Destroy a few buffers from the top of the stack.
|
| 3176 | for(size_t i = 0; i < maxBufCount / 5; ++i)
|
| 3177 | {
|
| 3178 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3179 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3180 | bufInfo.pop_back();
|
| 3181 | }
|
| 3182 |
|
| 3183 | // Create some more
|
| 3184 | for(size_t i = 0; i < maxBufCount / 5; ++i)
|
| 3185 | {
|
| 3186 | const bool upperAddress = (i % 2) != 0;
|
| 3187 | if(upperAddress)
|
| 3188 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
| 3189 | else
|
| 3190 | allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3191 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3192 | BufferInfo newBufInfo;
|
| 3193 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3194 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3195 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3196 | bufInfo.push_back(newBufInfo);
|
| 3197 | }
|
| 3198 |
|
| 3199 | // Destroy the buffers in reverse order.
|
| 3200 | while(!bufInfo.empty())
|
| 3201 | {
|
| 3202 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3203 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3204 | bufInfo.pop_back();
|
| 3205 | }
|
| 3206 |
|
| 3207 | // Create buffers on both sides until we reach out of memory.
|
| 3208 | prevOffsetLower = 0;
|
| 3209 | prevOffsetUpper = poolCreateInfo.blockSize;
|
| 3210 | res = VK_SUCCESS;
|
| 3211 | for(size_t i = 0; res == VK_SUCCESS; ++i)
|
| 3212 | {
|
| 3213 | const bool upperAddress = (i % 2) != 0;
|
| 3214 | if(upperAddress)
|
| 3215 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
| 3216 | else
|
| 3217 | allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3218 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3219 | BufferInfo newBufInfo;
|
| 3220 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3221 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
| 3222 | if(res == VK_SUCCESS)
|
| 3223 | {
|
| 3224 | if(upperAddress)
|
| 3225 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3226 | TEST(allocInfo.offset < prevOffsetUpper);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3227 | prevOffsetUpper = allocInfo.offset;
|
| 3228 | }
|
| 3229 | else
|
| 3230 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3231 | TEST(allocInfo.offset >= prevOffsetLower);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3232 | prevOffsetLower = allocInfo.offset;
|
| 3233 | }
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3234 | TEST(prevOffsetLower < prevOffsetUpper);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3235 | bufInfo.push_back(newBufInfo);
|
| 3236 | }
|
| 3237 | }
|
| 3238 |
|
| 3239 | // Destroy the buffers in random order.
|
| 3240 | while(!bufInfo.empty())
|
| 3241 | {
|
| 3242 | const size_t indexToDestroy = rand.Generate() % bufInfo.size();
|
| 3243 | const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
|
| 3244 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3245 | bufInfo.erase(bufInfo.begin() + indexToDestroy);
|
| 3246 | }
|
| 3247 |
|
| 3248 | // Create buffers on upper side only, constant size, until we reach out of memory.
|
| 3249 | prevOffsetUpper = poolCreateInfo.blockSize;
|
| 3250 | res = VK_SUCCESS;
|
| 3251 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
| 3252 | bufCreateInfo.size = bufSizeMax;
|
| 3253 | for(size_t i = 0; res == VK_SUCCESS; ++i)
|
| 3254 | {
|
| 3255 | BufferInfo newBufInfo;
|
| 3256 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3257 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
| 3258 | if(res == VK_SUCCESS)
|
| 3259 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3260 | TEST(allocInfo.offset < prevOffsetUpper);
|
Adam Sawicki | 680b225 | 2018-08-22 14:47:32 +0200 | [diff] [blame] | 3261 | prevOffsetUpper = allocInfo.offset;
|
| 3262 | bufInfo.push_back(newBufInfo);
|
| 3263 | }
|
| 3264 | }
|
| 3265 |
|
| 3266 | // Destroy the buffers in reverse order.
|
| 3267 | while(!bufInfo.empty())
|
| 3268 | {
|
| 3269 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3270 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3271 | bufInfo.pop_back();
|
| 3272 | }
|
| 3273 | }
|
| 3274 |
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3275 | // Test ring buffer with lost allocations.
|
| 3276 | {
|
| 3277 | // Allocate buffers until the pool is full.
|
| 3278 | // Notice CAN_BECOME_LOST flag and call to vmaSetCurrentFrameIndex.
|
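| | // Advancing the frame index is what allows allocations last used in older frames to be
|
| | // treated as candidates to become lost when new allocations need their space.
|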
| 3279 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT;
|
| 3280 | res = VK_SUCCESS;
|
| 3281 | for(size_t i = 0; res == VK_SUCCESS; ++i)
|
| 3282 | {
|
| 3283 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 3284 |
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3285 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3286 |
|
| 3287 | BufferInfo newBufInfo;
|
| 3288 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3289 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
| 3290 | if(res == VK_SUCCESS)
|
| 3291 | bufInfo.push_back(newBufInfo);
|
| 3292 | }
|
| 3293 |
|
| 3294 | // Free the first half of it.
|
| 3295 | {
|
| 3296 | const size_t buffersToDelete = bufInfo.size() / 2;
|
| 3297 | for(size_t i = 0; i < buffersToDelete; ++i)
|
| 3298 | {
|
| 3299 | vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
|
| 3300 | }
|
| 3301 | bufInfo.erase(bufInfo.begin(), bufInfo.begin() + buffersToDelete);
|
| 3302 | }
|
| 3303 |
|
| 3304 | // Allocate buffers until the pool is full again.
|
Adam Sawicki | 0ebdf0c | 2018-08-22 17:02:44 +0200 | [diff] [blame] | 3305 | // This way we make sure the ring buffer wraps around, with its front in the middle.
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3306 | res = VK_SUCCESS;
|
| 3307 | for(size_t i = 0; res == VK_SUCCESS; ++i)
|
| 3308 | {
|
| 3309 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 3310 |
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3311 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3312 |
|
| 3313 | BufferInfo newBufInfo;
|
| 3314 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3315 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
| 3316 | if(res == VK_SUCCESS)
|
| 3317 | bufInfo.push_back(newBufInfo);
|
| 3318 | }
|
| 3319 |
|
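| | // Remember the offset of the first allocation made with CAN_MAKE_OTHER_LOST below;
|
| | // the (currently disabled) loop further down uses it to detect a full wrap-around.
|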
| 3320 | VkDeviceSize firstNewOffset;
|
| 3321 | {
|
| 3322 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 3323 |
|
| 3324 | // Allocate a large buffer with CAN_MAKE_OTHER_LOST.
|
| 3325 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
|
| 3326 | bufCreateInfo.size = bufSizeMax;
|
| 3327 |
|
| 3328 | BufferInfo newBufInfo;
|
| 3329 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3330 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3331 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3332 | bufInfo.push_back(newBufInfo);
|
| 3333 | firstNewOffset = allocInfo.offset;
|
| 3334 |
|
| 3335 | // Make sure at least one buffer from the beginning became lost.
|
| 3336 | vmaGetAllocationInfo(g_hAllocator, bufInfo[0].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3337 | TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3338 | }
|
| 3339 |
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3340 | #if 0 // TODO Fix and uncomment. Failing on Intel.
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3341 | // Allocate more buffers that CAN_MAKE_OTHER_LOST until we wrap-around with this.
|
| 3342 | size_t newCount = 1;
|
| 3343 | for(;;)
|
| 3344 | {
|
| 3345 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 3346 |
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3347 | bufCreateInfo.size = align_up<VkDeviceSize>(bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin), 16);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3348 |
|
| 3349 | BufferInfo newBufInfo;
|
| 3350 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3351 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3352 |
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3353 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3354 | bufInfo.push_back(newBufInfo);
|
| 3355 | ++newCount;
|
| 3356 | if(allocInfo.offset < firstNewOffset)
|
| 3357 | break;
|
| 3358 | }
|
Adam Sawicki | fd366b6 | 2019-01-24 15:26:43 +0100 | [diff] [blame] | 3359 | #endif
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3360 |
|
Adam Sawicki | 0ebdf0c | 2018-08-22 17:02:44 +0200 | [diff] [blame] | 3361 | // Delete buffers that are lost.
|
| 3362 | for(size_t i = bufInfo.size(); i--; )
|
| 3363 | {
|
| 3364 | vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
|
| 3365 | if(allocInfo.deviceMemory == VK_NULL_HANDLE)
|
| 3366 | {
|
| 3367 | vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
|
| 3368 | bufInfo.erase(bufInfo.begin() + i);
|
| 3369 | }
|
| 3370 | }
|
| 3371 |
|
| 3372 | // Test vmaMakePoolAllocationsLost
|
| 3373 | {
|
| 3374 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 3375 |
|
Adam Sawicki | 4d35a5d | 2019-01-24 15:51:59 +0100 | [diff] [blame] | 3376 | size_t lostAllocCount = 0;
|
Adam Sawicki | 0ebdf0c | 2018-08-22 17:02:44 +0200 | [diff] [blame] | 3377 | vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostAllocCount);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3378 | TEST(lostAllocCount > 0);
|
Adam Sawicki | 0ebdf0c | 2018-08-22 17:02:44 +0200 | [diff] [blame] | 3379 |
|
| 3380 | size_t realLostAllocCount = 0;
|
| 3381 | for(size_t i = 0; i < bufInfo.size(); ++i)
|
| 3382 | {
|
| 3383 | vmaGetAllocationInfo(g_hAllocator, bufInfo[i].Allocation, &allocInfo);
|
| 3384 | if(allocInfo.deviceMemory == VK_NULL_HANDLE)
|
| 3385 | ++realLostAllocCount;
|
| 3386 | }
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3387 | TEST(realLostAllocCount == lostAllocCount);
|
Adam Sawicki | 0ebdf0c | 2018-08-22 17:02:44 +0200 | [diff] [blame] | 3388 | }
|
| 3389 |
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 3390 | // Destroy all the buffers in forward order.
|
| 3391 | for(size_t i = 0; i < bufInfo.size(); ++i)
|
| 3392 | vmaDestroyBuffer(g_hAllocator, bufInfo[i].Buffer, bufInfo[i].Allocation);
|
| 3393 | bufInfo.clear();
|
| 3394 | }
|
| 3395 |
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3396 | vmaDestroyPool(g_hAllocator, pool);
|
| 3397 | }
|
Adam Sawicki | f799c4f | 2018-08-23 10:40:30 +0200 | [diff] [blame] | 3398 |
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3399 | static void TestLinearAllocatorMultiBlock()
|
| 3400 | {
|
| 3401 | wprintf(L"Test linear allocator multi block\n");
|
| 3402 |
|
| 3403 | RandomNumberGenerator rand{345673};
|
| 3404 |
|
| 3405 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3406 | sampleBufCreateInfo.size = 1024 * 1024;
|
| 3407 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 3408 |
|
| 3409 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 3410 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 3411 |
|
| 3412 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 3413 | poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
|
| 3414 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3415 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3416 |
|
| 3417 | VmaPool pool = nullptr;
|
| 3418 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3419 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3420 |
|
| 3421 | VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
|
| 3422 |
|
| 3423 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 3424 | allocCreateInfo.pool = pool;
|
| 3425 |
|
| 3426 | std::vector<BufferInfo> bufInfo;
|
| 3427 | VmaAllocationInfo allocInfo;
|
| 3428 |
|
| 3429 | // Test one-time free.
|
| 3430 | {
|
| 3431 | // Allocate buffers until we move to a second block.
|
| 3432 | VkDeviceMemory lastMem = VK_NULL_HANDLE;
|
| 3433 | for(uint32_t i = 0; ; ++i)
|
| 3434 | {
|
| 3435 | BufferInfo newBufInfo;
|
| 3436 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3437 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3438 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3439 | bufInfo.push_back(newBufInfo);
|
| 3440 | if(lastMem && allocInfo.deviceMemory != lastMem)
|
| 3441 | {
|
| 3442 | break;
|
| 3443 | }
|
| 3444 | lastMem = allocInfo.deviceMemory;
|
| 3445 | }
|
| 3446 |
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3447 | TEST(bufInfo.size() > 2);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3448 |
|
| 3449 | // Make sure that the pool now has two blocks.
|
| 3450 | VmaPoolStats poolStats = {};
|
| 3451 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3452 | TEST(poolStats.blockCount == 2);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3453 |
|
| 3454 | // Destroy all the buffers in random order.
|
| 3455 | while(!bufInfo.empty())
|
| 3456 | {
|
| 3457 | const size_t indexToDestroy = rand.Generate() % bufInfo.size();
|
| 3458 | const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
|
| 3459 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3460 | bufInfo.erase(bufInfo.begin() + indexToDestroy);
|
| 3461 | }
|
| 3462 |
|
| 3463 | // Make sure that the pool now has at most one block.
|
| 3464 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3465 | TEST(poolStats.blockCount <= 1);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3466 | }
|
| 3467 |
|
| 3468 | // Test stack.
|
| 3469 | {
|
| 3470 | // Allocate buffers until we move to a second block.
|
| 3471 | VkDeviceMemory lastMem = VK_NULL_HANDLE;
|
| 3472 | for(uint32_t i = 0; ; ++i)
|
| 3473 | {
|
| 3474 | BufferInfo newBufInfo;
|
| 3475 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3476 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3477 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3478 | bufInfo.push_back(newBufInfo);
|
| 3479 | if(lastMem && allocInfo.deviceMemory != lastMem)
|
| 3480 | {
|
| 3481 | break;
|
| 3482 | }
|
| 3483 | lastMem = allocInfo.deviceMemory;
|
| 3484 | }
|
| 3485 |
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3486 | TEST(bufInfo.size() > 2);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3487 |
|
| 3488 | // Add a few more buffers.
|
| 3489 | for(uint32_t i = 0; i < 5; ++i)
|
| 3490 | {
|
| 3491 | BufferInfo newBufInfo;
|
| 3492 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3493 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3494 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3495 | bufInfo.push_back(newBufInfo);
|
| 3496 | }
|
| 3497 |
|
| 3498 | // Make sure that the pool now has two blocks.
|
| 3499 | VmaPoolStats poolStats = {};
|
| 3500 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3501 | TEST(poolStats.blockCount == 2);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3502 |
|
| 3503 | // Delete half of the buffers, LIFO.
|
| 3504 | for(size_t i = 0, countToDelete = bufInfo.size() / 2; i < countToDelete; ++i)
|
| 3505 | {
|
| 3506 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3507 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3508 | bufInfo.pop_back();
|
| 3509 | }
|
| 3510 |
|
| 3511 | // Add one more buffer.
|
| 3512 | BufferInfo newBufInfo;
|
| 3513 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3514 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3515 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3516 | bufInfo.push_back(newBufInfo);
|
| 3517 |
|
| 3518 | // Make sure that the pool now has one block.
|
| 3519 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3520 | TEST(poolStats.blockCount == 1);
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 3521 |
|
| 3522 | // Delete all the remaining buffers, LIFO.
|
| 3523 | while(!bufInfo.empty())
|
| 3524 | {
|
| 3525 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3526 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3527 | bufInfo.pop_back();
|
| 3528 | }
|
Adam Sawicki | f799c4f | 2018-08-23 10:40:30 +0200 | [diff] [blame] | 3529 | }
|
| 3530 |
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 3531 | vmaDestroyPool(g_hAllocator, pool);
|
| 3532 | }
|
| 3533 |
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3534 | static void ManuallyTestLinearAllocator()
|
| 3535 | {
|
| 3536 | VmaStats origStats;
|
| 3537 | vmaCalculateStats(g_hAllocator, &origStats);
|
| 3538 |
|
| 3539 | wprintf(L"Manually test linear allocator\n");
|
| 3540 |
|
| 3541 | RandomNumberGenerator rand{645332};
|
| 3542 |
|
| 3543 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3544 | sampleBufCreateInfo.size = 1024; // Whatever.
|
| 3545 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 3546 |
|
| 3547 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 3548 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 3549 |
|
| 3550 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 3551 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3552 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3553 |
|
| 3554 | poolCreateInfo.blockSize = 10 * 1024;
|
| 3555 | poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
|
| 3556 | poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
|
| 3557 |
|
| 3558 | VmaPool pool = nullptr;
|
| 3559 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3560 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3561 |
|
| 3562 | VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
|
| 3563 |
|
| 3564 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 3565 | allocCreateInfo.pool = pool;
|
| 3566 |
|
| 3567 | std::vector<BufferInfo> bufInfo;
|
| 3568 | VmaAllocationInfo allocInfo;
|
| 3569 | BufferInfo newBufInfo;
|
| 3570 |
|
| 3571 | // Test double stack.
|
| 3572 | {
|
| 3573 | /*
|
| 3574 | Lower: Buffer 32 B, Buffer 1024 B, Buffer 32 B
|
| 3575 | Upper: Buffer 16 B, Buffer 1024 B, Buffer 128 B
|
| 3576 |
|
| 3577 | In total:
|
| 3578 | 1 block allocated
|
| 3579 | 10240 Vulkan bytes
|
| 3580 | 6 new allocations
|
| 3581 | 2256 bytes in allocations (32 + 1024 + 32 + 128 + 1024 + 16)
|
| 3582 | */
|
| 3583 |
|
| 3584 | bufCreateInfo.size = 32;
|
| 3585 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3586 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3587 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3588 | bufInfo.push_back(newBufInfo);
|
| 3589 |
|
| 3590 | bufCreateInfo.size = 1024;
|
| 3591 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3592 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3593 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3594 | bufInfo.push_back(newBufInfo);
|
| 3595 |
|
| 3596 | bufCreateInfo.size = 32;
|
| 3597 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3598 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3599 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3600 | bufInfo.push_back(newBufInfo);
|
| 3601 |
|
| 3602 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
|
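| | // With the linear algorithm, VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT makes the following
| | // allocations come from the upper end of the block (double stack), growing downward,
| | // while the earlier allocations stay at the lower end. A sketch of how the flag could be
| | // cleared again to return to the lower stack:
| | //     allocCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;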
| 3603 |
|
| 3604 | bufCreateInfo.size = 128;
|
| 3605 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3606 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3607 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3608 | bufInfo.push_back(newBufInfo);
|
| 3609 |
|
| 3610 | bufCreateInfo.size = 1024;
|
| 3611 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3612 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3613 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3614 | bufInfo.push_back(newBufInfo);
|
| 3615 |
|
| 3616 | bufCreateInfo.size = 16;
|
| 3617 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3618 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3619 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | fd11d75 | 2018-08-22 15:02:10 +0200 | [diff] [blame] | 3620 | bufInfo.push_back(newBufInfo);
|
| 3621 |
|
| 3622 | VmaStats currStats;
|
| 3623 | vmaCalculateStats(g_hAllocator, &currStats);
|
| 3624 | VmaPoolStats poolStats;
|
| 3625 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
| 3626 |
|
| 3627 | char* statsStr = nullptr;
|
| 3628 | vmaBuildStatsString(g_hAllocator, &statsStr, VK_TRUE);
|
| 3629 |
|
| 3630 | // PUT BREAKPOINT HERE TO CHECK.
|
| 3631 | // Inspect: currStats versus origStats, poolStats, statsStr.
|
| 3632 | int I = 0; // Dummy statement to set the breakpoint on.
|
| 3633 |
|
| 3634 | vmaFreeStatsString(g_hAllocator, statsStr);
|
| 3635 |
|
| 3636 | // Destroy the buffers in reverse order.
|
| 3637 | while(!bufInfo.empty())
|
| 3638 | {
|
| 3639 | const BufferInfo& currBufInfo = bufInfo.back();
|
| 3640 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 3641 | bufInfo.pop_back();
|
| 3642 | }
|
| 3643 | }
|
| 3644 |
|
| 3645 | vmaDestroyPool(g_hAllocator, pool);
|
| 3646 | }
|
| 3647 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3648 | static void BenchmarkAlgorithmsCase(FILE* file,
|
| 3649 | uint32_t algorithm,
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3650 | bool empty,
|
| 3651 | VmaAllocationCreateFlags allocStrategy,
|
| 3652 | FREE_ORDER freeOrder)
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3653 | {
|
| 3654 | RandomNumberGenerator rand{16223};
|
| 3655 |
|
| 3656 | const VkDeviceSize bufSizeMin = 32;
|
| 3657 | const VkDeviceSize bufSizeMax = 1024;
|
| 3658 | const size_t maxBufCapacity = 10000;
|
| 3659 | const uint32_t iterationCount = 10;
|
| 3660 |
|
| 3661 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3662 | sampleBufCreateInfo.size = bufSizeMax;
|
| 3663 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 3664 |
|
| 3665 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 3666 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 3667 |
|
| 3668 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 3669 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3670 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3671 |
|
| 3672 | poolCreateInfo.blockSize = bufSizeMax * maxBufCapacity;
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3673 | poolCreateInfo.flags |= algorithm;
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3674 | poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
|
| 3675 |
|
| 3676 | VmaPool pool = nullptr;
|
| 3677 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3678 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3679 |
|
| 3680 | // Buffer created just to get memory requirements. Never bound to any memory.
|
| 3681 | VkBuffer dummyBuffer = VK_NULL_HANDLE;
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 3682 | res = vkCreateBuffer(g_hDevice, &sampleBufCreateInfo, g_Allocs, &dummyBuffer);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3683 | TEST(res == VK_SUCCESS && dummyBuffer);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3684 |
|
| 3685 | VkMemoryRequirements memReq = {};
|
| 3686 | vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
|
| 3687 |
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 3688 | vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3689 |
|
| 3690 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 3691 | allocCreateInfo.pool = pool;
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3692 | allocCreateInfo.flags = allocStrategy;
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3693 |
|
| 3694 | VmaAllocation alloc;
|
| 3695 | std::vector<VmaAllocation> baseAllocations;
|
| 3696 |
|
| 3697 | if(!empty)
|
| 3698 | {
|
Adam Sawicki | 1f7f8af | 2018-10-03 17:37:55 +0200 | [diff] [blame] | 3699 | // Make allocations up to 1/3 of pool size.
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3700 | VkDeviceSize totalSize = 0;
|
Adam Sawicki | 1f7f8af | 2018-10-03 17:37:55 +0200 | [diff] [blame] | 3701 | while(totalSize < poolCreateInfo.blockSize / 3)
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3702 | {
|
Adam Sawicki | 4d844e2 | 2019-01-24 16:21:05 +0100 | [diff] [blame] | 3703 | // This test intentionally allows sizes that are aligned to 4 or 16 bytes.
|
| 3704 | // This is theoretically allowed and already uncovered one bug.
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3705 | memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
|
| 3706 | res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3707 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3708 | baseAllocations.push_back(alloc);
|
| 3709 | totalSize += memReq.size;
|
| 3710 | }
|
| 3711 |
|
| 3712 | // Delete half of them, chosen at random.
|
| 3713 | size_t allocsToDelete = baseAllocations.size() / 2;
|
| 3714 | for(size_t i = 0; i < allocsToDelete; ++i)
|
| 3715 | {
|
| 3716 | const size_t index = (size_t)rand.Generate() % baseAllocations.size();
|
| 3717 | vmaFreeMemory(g_hAllocator, baseAllocations[index]);
|
| 3718 | baseAllocations.erase(baseAllocations.begin() + index);
|
| 3719 | }
|
| 3720 | }
|
| 3721 |
|
| 3722 | // BENCHMARK
|
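| | // Each iteration allocates allocCount allocations of random size with vmaAllocateMemory
| | // (no buffers are created or bound), frees them in the requested order, and accumulates
| | // both times. Because the pool was created with minBlockCount = maxBlockCount = 1, no new
| | // VkDeviceMemory blocks should be allocated here, so only the allocator's CPU-side
| | // bookkeeping is measured.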
Adam Sawicki | 1f7f8af | 2018-10-03 17:37:55 +0200 | [diff] [blame] | 3723 | const size_t allocCount = maxBufCapacity / 3;
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3724 | std::vector<VmaAllocation> testAllocations;
|
| 3725 | testAllocations.reserve(allocCount);
|
| 3726 | duration allocTotalDuration = duration::zero();
|
| 3727 | duration freeTotalDuration = duration::zero();
|
| 3728 | for(uint32_t iterationIndex = 0; iterationIndex < iterationCount; ++iterationIndex)
|
| 3729 | {
|
| 3730 | // Allocations
|
| 3731 | time_point allocTimeBeg = std::chrono::high_resolution_clock::now();
|
| 3732 | for(size_t i = 0; i < allocCount; ++i)
|
| 3733 | {
|
| 3734 | memReq.size = bufSizeMin + rand.Generate() % (bufSizeMax - bufSizeMin);
|
| 3735 | res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3736 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3737 | testAllocations.push_back(alloc);
|
| 3738 | }
|
| 3739 | allocTotalDuration += std::chrono::high_resolution_clock::now() - allocTimeBeg;
|
| 3740 |
|
| 3741 | // Deallocations
|
| 3742 | switch(freeOrder)
|
| 3743 | {
|
| 3744 | case FREE_ORDER::FORWARD:
|
| 3745 | // Leave testAllocations unchanged.
|
| 3746 | break;
|
| 3747 | case FREE_ORDER::BACKWARD:
|
| 3748 | std::reverse(testAllocations.begin(), testAllocations.end());
|
| 3749 | break;
|
| 3750 | case FREE_ORDER::RANDOM:
|
| 3751 | std::shuffle(testAllocations.begin(), testAllocations.end(), MyUniformRandomNumberGenerator(rand));
|
| 3752 | break;
|
| 3753 | default: assert(0);
|
| 3754 | }
|
| 3755 |
|
| 3756 | time_point freeTimeBeg = std::chrono::high_resolution_clock::now();
|
| 3757 | for(size_t i = 0; i < allocCount; ++i)
|
| 3758 | vmaFreeMemory(g_hAllocator, testAllocations[i]);
|
| 3759 | freeTotalDuration += std::chrono::high_resolution_clock::now() - freeTimeBeg;
|
| 3760 |
|
| 3761 | testAllocations.clear();
|
| 3762 | }
|
| 3763 |
|
| 3764 | // Delete baseAllocations
|
| 3765 | while(!baseAllocations.empty())
|
| 3766 | {
|
| 3767 | vmaFreeMemory(g_hAllocator, baseAllocations.back());
|
| 3768 | baseAllocations.pop_back();
|
| 3769 | }
|
| 3770 |
|
| 3771 | vmaDestroyPool(g_hAllocator, pool);
|
| 3772 |
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3773 | const float allocTotalSeconds = ToFloatSeconds(allocTotalDuration);
|
| 3774 | const float freeTotalSeconds = ToFloatSeconds(freeTotalDuration);
|
| 3775 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3776 | printf(" Algorithm=%s %s Allocation=%s FreeOrder=%s: allocations %g s, free %g s\n",
|
| 3777 | AlgorithmToStr(algorithm),
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 3778 | empty ? "Empty" : "Not empty",
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3779 | GetAllocationStrategyName(allocStrategy),
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3780 | FREE_ORDER_NAMES[(size_t)freeOrder],
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3781 | allocTotalSeconds,
|
| 3782 | freeTotalSeconds);
|
| 3783 |
|
| 3784 | if(file)
|
| 3785 | {
|
| 3786 | std::string currTime;
|
| 3787 | CurrentTimeToStr(currTime);
|
| 3788 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3789 | fprintf(file, "%s,%s,%s,%u,%s,%s,%g,%g\n",
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3790 | CODE_DESCRIPTION, currTime.c_str(),
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3791 | AlgorithmToStr(algorithm),
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3792 | empty ? 1 : 0,
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3793 | GetAllocationStrategyName(allocStrategy),
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3794 | FREE_ORDER_NAMES[(uint32_t)freeOrder],
|
| 3795 | allocTotalSeconds,
|
| 3796 | freeTotalSeconds);
|
| 3797 | }
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3798 | }
|
| 3799 |
|
Adam Sawicki | e73e988 | 2020-03-20 18:05:42 +0100 | [diff] [blame] | 3800 | static void TestBufferDeviceAddress()
|
| 3801 | {
|
| 3802 | wprintf(L"Test buffer device address\n");
|
| 3803 |
|
| 3804 | assert(g_BufferDeviceAddressEnabled);
|
| 3805 |
|
| 3806 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3807 | bufCreateInfo.size = 0x10000;
|
| 3808 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
|
| 3809 | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT; // The flag under test.
|
| 3810 |
|
| 3811 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 3812 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
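| | // What is verified here (assuming the allocator was created with
| | // VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT): VMA should chain VkMemoryAllocateFlagsInfo
| | // with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT into its vkAllocateMemory calls, so that
| | // vkGetBufferDeviceAddress* below returns a non-zero address for both a placed and a
| | // dedicated allocation.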
| 3813 |
|
| 3814 | for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
|
| 3815 | {
|
| 3816 | // 1st iteration uses a placed (block) allocation, 2nd a dedicated allocation.
|
| 3817 | if(testIndex == 1)
|
| 3818 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 3819 |
|
| 3820 | BufferInfo bufInfo = {};
|
| 3821 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3822 | &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
|
| 3823 | TEST(res == VK_SUCCESS);
|
| 3824 |
|
| 3825 | VkBufferDeviceAddressInfoEXT bufferDeviceAddressInfo = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT };
|
| 3826 | bufferDeviceAddressInfo.buffer = bufInfo.Buffer;
|
| 3827 | //assert(g_vkGetBufferDeviceAddressEXT != nullptr);
|
| 3828 | if(g_vkGetBufferDeviceAddressEXT != nullptr)
|
| 3829 | {
|
| 3830 | VkDeviceAddress addr = g_vkGetBufferDeviceAddressEXT(g_hDevice, &bufferDeviceAddressInfo);
|
| 3831 | TEST(addr != 0);
|
| 3832 | }
|
| 3833 |
|
| 3834 | vmaDestroyBuffer(g_hAllocator, bufInfo.Buffer, bufInfo.Allocation);
|
| 3835 | }
|
| 3836 | }
|
| 3837 |
|
Adam Sawicki | f201205 | 2021-01-11 18:04:42 +0100 | [diff] [blame] | 3838 | static void TestMemoryPriority()
|
| 3839 | {
|
| 3840 | wprintf(L"Test memory priority\n");
|
| 3841 |
|
| 3842 | assert(VK_EXT_memory_priority_enabled);
|
| 3843 |
|
| 3844 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3845 | bufCreateInfo.size = 0x10000;
|
| 3846 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 3847 |
|
| 3848 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 3849 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 3850 | allocCreateInfo.priority = 1.f;
|
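| | // With VK_EXT_memory_priority enabled, VMA is expected to pass this value to
| | // vkAllocateMemory via VkMemoryPriorityAllocateInfoEXT (at least for the dedicated
| | // allocation case below). Vulkan provides no way to query the priority back, which is
| | // why the test only checks that creation succeeds.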
| 3851 |
|
| 3852 | for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
|
| 3853 | {
|
| 3854 | // 1st iteration uses a placed (block) allocation, 2nd a dedicated allocation.
|
| 3855 | if(testIndex == 1)
|
| 3856 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 3857 |
|
| 3858 | BufferInfo bufInfo = {};
|
| 3859 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 3860 | &bufInfo.Buffer, &bufInfo.Allocation, nullptr);
|
| 3861 | TEST(res == VK_SUCCESS);
|
| 3862 |
|
| 3863 | // The priority cannot be queried back through the Vulkan API, so there is nothing to validate here.
|
| 3864 |
|
| 3865 | vmaDestroyBuffer(g_hAllocator, bufInfo.Buffer, bufInfo.Allocation);
|
| 3866 | }
|
| 3867 | }
|
| 3868 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3869 | static void BenchmarkAlgorithms(FILE* file)
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3870 | {
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3871 | wprintf(L"Benchmark algorithms\n");
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3872 |
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3873 | if(file)
|
| 3874 | {
|
| 3875 | fprintf(file,
|
| 3876 | "Code,Time,"
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3877 | "Algorithm,Empty,Allocation strategy,Free order,"
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 3878 | "Allocation time (s),Deallocation time (s)\n");
|
| 3879 | }
|
| 3880 |
|
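| | // The nested loops below run BenchmarkAlgorithmsCase for every combination of free order,
| | // empty vs. pre-populated pool, algorithm (default, buddy, linear), and - for the default
| | // algorithm only - allocation strategy. How many free-order and empty variants are included
| | // depends on ConfigType.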
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3881 | uint32_t freeOrderCount = 1;
|
| 3882 | if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_LARGE)
|
| 3883 | freeOrderCount = 3;
|
| 3884 | else if(ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL)
|
| 3885 | freeOrderCount = 2;
|
| 3886 |
|
| 3887 | const uint32_t emptyCount = ConfigType >= CONFIG_TYPE::CONFIG_TYPE_SMALL ? 2 : 1;
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3888 | const uint32_t allocStrategyCount = GetAllocationStrategyCount();
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3889 |
|
| 3890 | for(uint32_t freeOrderIndex = 0; freeOrderIndex < freeOrderCount; ++freeOrderIndex)
|
| 3891 | {
|
| 3892 | FREE_ORDER freeOrder = FREE_ORDER::COUNT;
|
| 3893 | switch(freeOrderIndex)
|
| 3894 | {
|
| 3895 | case 0: freeOrder = FREE_ORDER::BACKWARD; break;
|
| 3896 | case 1: freeOrder = FREE_ORDER::FORWARD; break;
|
| 3897 | case 2: freeOrder = FREE_ORDER::RANDOM; break;
|
| 3898 | default: assert(0);
|
| 3899 | }
|
| 3900 |
|
| 3901 | for(uint32_t emptyIndex = 0; emptyIndex < emptyCount; ++emptyIndex)
|
| 3902 | {
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3903 | for(uint32_t algorithmIndex = 0; algorithmIndex < 3; ++algorithmIndex)
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3904 | {
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3905 | uint32_t algorithm = 0;
|
| 3906 | switch(algorithmIndex)
|
| 3907 | {
|
| 3908 | case 0:
|
| 3909 | break;
|
| 3910 | case 1:
|
| 3911 | algorithm = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
|
| 3912 | break;
|
| 3913 | case 2:
|
| 3914 | algorithm = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
|
| 3915 | break;
|
| 3916 | default:
|
| 3917 | assert(0);
|
| 3918 | }
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3919 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3920 | uint32_t currAllocStrategyCount = algorithm != 0 ? 1 : allocStrategyCount;
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3921 | for(uint32_t allocStrategyIndex = 0; allocStrategyIndex < currAllocStrategyCount; ++allocStrategyIndex)
|
| 3922 | {
|
| 3923 | VmaAllocationCreateFlags strategy = 0;
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3924 | if(currAllocStrategyCount > 1)
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3925 | {
|
| 3926 | switch(allocStrategyIndex)
|
| 3927 | {
|
| 3928 | case 0: strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT; break;
|
| 3929 | case 1: strategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT; break;
|
| 3930 | case 2: strategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT; break;
|
| 3931 | default: assert(0);
|
| 3932 | }
|
| 3933 | }
|
| 3934 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3935 | BenchmarkAlgorithmsCase(
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3936 | file,
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 3937 | algorithm,
|
Adam Sawicki | 1f7f8af | 2018-10-03 17:37:55 +0200 | [diff] [blame] | 3938 | (emptyIndex == 0), // empty
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 3939 | strategy,
|
| 3940 | freeOrder); // freeOrder
|
| 3941 | }
|
Adam Sawicki | 0a60713 | 2018-08-24 11:18:41 +0200 | [diff] [blame] | 3942 | }
|
| 3943 | }
|
| 3944 | }
|
| 3945 | }
|
| 3946 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 3947 | static void TestPool_SameSize()
|
| 3948 | {
|
| 3949 | const VkDeviceSize BUF_SIZE = 1024 * 1024;
|
| 3950 | const size_t BUF_COUNT = 100;
|
| 3951 | VkResult res;
|
| 3952 |
|
| 3953 | RandomNumberGenerator rand{123};
|
| 3954 |
|
| 3955 | VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 3956 | bufferInfo.size = BUF_SIZE;
|
| 3957 | bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 3958 |
|
| 3959 | uint32_t memoryTypeBits = UINT32_MAX;
|
| 3960 | {
|
| 3961 | VkBuffer dummyBuffer;
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 3962 | res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3963 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 3964 |
|
| 3965 | VkMemoryRequirements memReq;
|
| 3966 | vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
|
| 3967 | memoryTypeBits = memReq.memoryTypeBits;
|
| 3968 |
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 3969 | vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 3970 | }
|
| 3971 |
|
| 3972 | VmaAllocationCreateInfo poolAllocInfo = {};
|
| 3973 | poolAllocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 3974 | uint32_t memTypeIndex;
|
| 3975 | res = vmaFindMemoryTypeIndex(
|
| 3976 | g_hAllocator,
|
| 3977 | memoryTypeBits,
|
| 3978 | &poolAllocInfo,
|
| 3979 | &memTypeIndex);
|
| 3980 |
|
| 3981 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 3982 | poolCreateInfo.memoryTypeIndex = memTypeIndex;
|
| 3983 | poolCreateInfo.blockSize = BUF_SIZE * BUF_COUNT / 4;
|
| 3984 | poolCreateInfo.minBlockCount = 1;
|
| 3985 | poolCreateInfo.maxBlockCount = 4;
|
| 3986 | poolCreateInfo.frameInUseCount = 0;
|
| 3987 |
|
| 3988 | VmaPool pool;
|
| 3989 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 3990 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 3991 |
|
Adam Sawicki | a020fb8 | 2019-11-02 14:43:06 +0100 | [diff] [blame] | 3992 | // Test pool name
|
| 3993 | {
|
| 3994 | static const char* const POOL_NAME = "Pool name";
|
| 3995 | vmaSetPoolName(g_hAllocator, pool, POOL_NAME);
|
| 3996 |
|
| 3997 | const char* fetchedPoolName = nullptr;
|
| 3998 | vmaGetPoolName(g_hAllocator, pool, &fetchedPoolName);
|
| 3999 | TEST(strcmp(fetchedPoolName, POOL_NAME) == 0);
|
| 4000 |
|
Adam Sawicki | a020fb8 | 2019-11-02 14:43:06 +0100 | [diff] [blame] | 4001 | vmaSetPoolName(g_hAllocator, pool, nullptr);
|
| 4002 | }
|
| 4003 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4004 | vmaSetCurrentFrameIndex(g_hAllocator, 1);
|
| 4005 |
|
| 4006 | VmaAllocationCreateInfo allocInfo = {};
|
| 4007 | allocInfo.pool = pool;
|
| 4008 | allocInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
|
| 4009 | VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
|
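| | // Lost-allocation mechanism exercised below: an allocation created with CAN_BECOME_LOST
| | // may be taken over when a later allocation created with CAN_MAKE_OTHER_LOST cannot find
| | // free space, provided the existing allocation was not used within the last
| | // frameInUseCount frames (0 for this pool). Typical per-frame pattern (a sketch, not part
| | // of this test):
| | //     vmaSetCurrentFrameIndex(allocator, frameIndex);
| | //     vmaGetAllocationInfo(allocator, alloc, &info); // "touches" the allocation
| | //     if(info.deviceMemory == VK_NULL_HANDLE) { /* lost - destroy and recreate */ }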
| 4010 |
|
| 4011 | struct BufItem
|
| 4012 | {
|
| 4013 | VkBuffer Buf;
|
| 4014 | VmaAllocation Alloc;
|
| 4015 | };
|
| 4016 | std::vector<BufItem> items;
|
| 4017 |
|
| 4018 | // Fill entire pool.
|
| 4019 | for(size_t i = 0; i < BUF_COUNT; ++i)
|
| 4020 | {
|
| 4021 | BufItem item;
|
| 4022 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4023 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4024 | items.push_back(item);
|
| 4025 | }
|
| 4026 |
|
| 4027 | // Make sure that another allocation would fail.
|
| 4028 | {
|
| 4029 | BufItem item;
|
| 4030 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4031 | TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4032 | }
|
| 4033 |
|
| 4034 | // Validate that no buffer is lost. Also check that they are not mapped.
|
| 4035 | for(size_t i = 0; i < items.size(); ++i)
|
| 4036 | {
|
| 4037 | VmaAllocationInfo allocInfo;
|
| 4038 | vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4039 | TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
|
| 4040 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4041 | }
|
| 4042 |
|
| 4043 | // Free a percentage of randomly chosen items.
|
| 4044 | {
|
| 4045 | const size_t PERCENT_TO_FREE = 10;
|
| 4046 | size_t itemsToFree = items.size() * PERCENT_TO_FREE / 100;
|
| 4047 | for(size_t i = 0; i < itemsToFree; ++i)
|
| 4048 | {
|
| 4049 | size_t index = (size_t)rand.Generate() % items.size();
|
| 4050 | vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
|
| 4051 | items.erase(items.begin() + index);
|
| 4052 | }
|
| 4053 | }
|
| 4054 |
|
| 4055 | // Randomly allocate and free items.
|
| 4056 | {
|
| 4057 | const size_t OPERATION_COUNT = BUF_COUNT;
|
| 4058 | for(size_t i = 0; i < OPERATION_COUNT; ++i)
|
| 4059 | {
|
| 4060 | bool allocate = rand.Generate() % 2 != 0;
|
| 4061 | if(allocate)
|
| 4062 | {
|
| 4063 | if(items.size() < BUF_COUNT)
|
| 4064 | {
|
| 4065 | BufItem item;
|
| 4066 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4067 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4068 | items.push_back(item);
|
| 4069 | }
|
| 4070 | }
|
| 4071 | else // Free
|
| 4072 | {
|
| 4073 | if(!items.empty())
|
| 4074 | {
|
| 4075 | size_t index = (size_t)rand.Generate() % items.size();
|
| 4076 | vmaDestroyBuffer(g_hAllocator, items[index].Buf, items[index].Alloc);
|
| 4077 | items.erase(items.begin() + index);
|
| 4078 | }
|
| 4079 | }
|
| 4080 | }
|
| 4081 | }
|
| 4082 |
|
| 4083 | // Allocate up to maximum.
|
| 4084 | while(items.size() < BUF_COUNT)
|
| 4085 | {
|
| 4086 | BufItem item;
|
| 4087 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4088 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4089 | items.push_back(item);
|
| 4090 | }
|
| 4091 |
|
| 4092 | // Validate that no buffer is lost.
|
| 4093 | for(size_t i = 0; i < items.size(); ++i)
|
| 4094 | {
|
| 4095 | VmaAllocationInfo allocInfo;
|
| 4096 | vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4097 | TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4098 | }
|
| 4099 |
|
| 4100 | // Next frame.
|
| 4101 | vmaSetCurrentFrameIndex(g_hAllocator, 2);
|
| 4102 |
|
| 4103 | // Allocate another BUF_COUNT buffers.
|
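| | // The pool is already full, so creating BUF_COUNT new buffers can only succeed by making
| | // the frame-1 allocations lost: frameInUseCount is 0 and none of them were touched in
| | // frame 2, so they are eligible to be taken over.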
| 4104 | for(size_t i = 0; i < BUF_COUNT; ++i)
|
| 4105 | {
|
| 4106 | BufItem item;
|
| 4107 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4108 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4109 | items.push_back(item);
|
| 4110 | }
|
| 4111 |
|
| 4112 | // Make sure the first BUF_COUNT buffers are lost. Delete them.
|
| 4113 | for(size_t i = 0; i < BUF_COUNT; ++i)
|
| 4114 | {
|
| 4115 | VmaAllocationInfo allocInfo;
|
| 4116 | vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4117 | TEST(allocInfo.deviceMemory == VK_NULL_HANDLE);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4118 | vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
|
| 4119 | }
|
| 4120 | items.erase(items.begin(), items.begin() + BUF_COUNT);
|
| 4121 |
|
| 4122 | // Validate that no buffer is lost.
|
| 4123 | for(size_t i = 0; i < items.size(); ++i)
|
| 4124 | {
|
| 4125 | VmaAllocationInfo allocInfo;
|
| 4126 | vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4127 | TEST(allocInfo.deviceMemory != VK_NULL_HANDLE);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4128 | }
|
| 4129 |
|
| 4130 | // Free one item.
|
| 4131 | vmaDestroyBuffer(g_hAllocator, items.back().Buf, items.back().Alloc);
|
| 4132 | items.pop_back();
|
| 4133 |
|
| 4134 | // Validate statistics.
|
| 4135 | {
|
| 4136 | VmaPoolStats poolStats = {};
|
| 4137 | vmaGetPoolStats(g_hAllocator, pool, &poolStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4138 | TEST(poolStats.allocationCount == items.size());
|
| 4139 | TEST(poolStats.size == BUF_COUNT * BUF_SIZE);
|
| 4140 | TEST(poolStats.unusedRangeCount == 1);
|
| 4141 | TEST(poolStats.unusedRangeSizeMax == BUF_SIZE);
|
| 4142 | TEST(poolStats.unusedSize == BUF_SIZE);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4143 | }
|
| 4144 |
|
| 4145 | // Free all remaining items.
|
| 4146 | for(size_t i = items.size(); i--; )
|
| 4147 | vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
|
| 4148 | items.clear();
|
| 4149 |
|
| 4150 | // Allocate maximum items again.
|
| 4151 | for(size_t i = 0; i < BUF_COUNT; ++i)
|
| 4152 | {
|
| 4153 | BufItem item;
|
| 4154 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4155 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4156 | items.push_back(item);
|
| 4157 | }
|
| 4158 |
|
| 4159 | // Delete every other item. (erase() shifts the remaining elements, so advancing i by 1 skips one surviving item each iteration.)
|
| 4160 | for(size_t i = 0; i < BUF_COUNT / 2; ++i)
|
| 4161 | {
|
| 4162 | vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
|
| 4163 | items.erase(items.begin() + i);
|
| 4164 | }
|
| 4165 |
|
| 4166 | // Defragment!
|
| 4167 | {
|
| 4168 | std::vector<VmaAllocation> allocationsToDefragment(items.size());
|
| 4169 | for(size_t i = 0; i < items.size(); ++i)
|
| 4170 | allocationsToDefragment[i] = items[i].Alloc;
|
| 4171 |
|
| 4172 | VmaDefragmentationStats defragmentationStats;
|
| 4173 | res = vmaDefragment(g_hAllocator, allocationsToDefragment.data(), items.size(), nullptr, nullptr, &defragmentationStats);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4174 | TEST(res == VK_SUCCESS);
|
| 4175 | TEST(defragmentationStats.deviceMemoryBlocksFreed == 2);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4176 | }
|
| 4177 |
|
| 4178 | // Free all remaining items.
|
| 4179 | for(size_t i = items.size(); i--; )
|
| 4180 | vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
|
| 4181 | items.clear();
|
| 4182 |
|
| 4183 | ////////////////////////////////////////////////////////////////////////////////
|
| 4184 | // Test for vmaMakePoolAllocationsLost
|
| 4185 |
|
| 4186 | // Allocate 4 buffers on frame 10.
|
| 4187 | vmaSetCurrentFrameIndex(g_hAllocator, 10);
|
| 4188 | for(size_t i = 0; i < 4; ++i)
|
| 4189 | {
|
| 4190 | BufItem item;
|
| 4191 | res = vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocInfo, &item.Buf, &item.Alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4192 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4193 | items.push_back(item);
|
| 4194 | }
|
| 4195 |
|
| 4196 | // Touch first 2 of them on frame 11.
|
| 4197 | vmaSetCurrentFrameIndex(g_hAllocator, 11);
|
| 4198 | for(size_t i = 0; i < 2; ++i)
|
| 4199 | {
|
| 4200 | VmaAllocationInfo allocInfo;
|
| 4201 | vmaGetAllocationInfo(g_hAllocator, items[i].Alloc, &allocInfo);
|
| 4202 | }
|
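| | // vmaGetAllocationInfo counts as using ("touching") an allocation: it updates the
| | // allocation's last-use frame index, so the two allocations touched on frame 11 are
| | // expected to survive the vmaMakePoolAllocationsLost call below.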
| 4203 |
|
| 4204 | // Call vmaMakePoolAllocationsLost. Only the remaining 2 (untouched) allocations should become lost.
|
| 4205 | size_t lostCount = 0xDEADC0DE;
|
| 4206 | vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4207 | TEST(lostCount == 2);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4208 |
|
| 4209 | // Make another call. Now 0 should be lost.
|
| 4210 | vmaMakePoolAllocationsLost(g_hAllocator, pool, &lostCount);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4211 | TEST(lostCount == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4212 |
|
| 4213 | // Make another call, with null count. Should not crash.
|
| 4214 | vmaMakePoolAllocationsLost(g_hAllocator, pool, nullptr);
|
| 4215 |
|
| 4216 | // END: Free all remaining items.
|
| 4217 | for(size_t i = items.size(); i--; )
|
| 4218 | vmaDestroyBuffer(g_hAllocator, items[i].Buf, items[i].Alloc);
|
| 4219 |
|
| 4220 | items.clear();
|
| 4221 |
|
Adam Sawicki | d292417 | 2018-06-11 12:48:46 +0200 | [diff] [blame] | 4222 | ////////////////////////////////////////////////////////////////////////////////
|
| 4223 | // Test for allocation too large for pool
|
| 4224 |
|
| 4225 | {
|
| 4226 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4227 | allocCreateInfo.pool = pool;
|
| 4228 |
|
| 4229 | VkMemoryRequirements memReq;
|
| 4230 | memReq.memoryTypeBits = UINT32_MAX;
|
| 4231 | memReq.alignment = 1;
|
| 4232 | memReq.size = poolCreateInfo.blockSize + 4;
|
| 4233 |
|
| 4234 | VmaAllocation alloc = nullptr;
|
| 4235 | res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo, &alloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4236 | TEST(res == VK_ERROR_OUT_OF_DEVICE_MEMORY && alloc == nullptr);
|
Adam Sawicki | d292417 | 2018-06-11 12:48:46 +0200 | [diff] [blame] | 4237 | }
|
| 4238 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4239 | vmaDestroyPool(g_hAllocator, pool);
|
| 4240 | }
|
| 4241 |
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4242 | static bool ValidatePattern(const void* pMemory, size_t size, uint8_t pattern)
|
| 4243 | {
|
| 4244 | const uint8_t* pBytes = (const uint8_t*)pMemory;
|
| 4245 | for(size_t i = 0; i < size; ++i)
|
| 4246 | {
|
| 4247 | if(pBytes[i] != pattern)
|
| 4248 | {
|
| 4249 | return false;
|
| 4250 | }
|
| 4251 | }
|
| 4252 | return true;
|
| 4253 | }
|
| 4254 |
|
| 4255 | static void TestAllocationsInitialization()
|
| 4256 | {
|
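| | // This test assumes the library was compiled with VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled,
| | // which fills newly allocated memory with pattern 0xDC and freed memory with pattern 0xEF -
| | // the two patterns validated below.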
| 4257 | VkResult res;
|
| 4258 |
|
| 4259 | const size_t BUF_SIZE = 1024;
|
| 4260 |
|
| 4261 | // Create pool.
|
| 4262 |
|
| 4263 | VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 4264 | bufInfo.size = BUF_SIZE;
|
| 4265 | bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 4266 |
|
| 4267 | VmaAllocationCreateInfo dummyBufAllocCreateInfo = {};
|
| 4268 | dummyBufAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 4269 |
|
| 4270 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 4271 | poolCreateInfo.blockSize = BUF_SIZE * 10;
|
| 4272 | poolCreateInfo.minBlockCount = 1; // To keep memory alive while pool exists.
|
| 4273 | poolCreateInfo.maxBlockCount = 1;
|
| 4274 | res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufInfo, &dummyBufAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4275 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4276 |
|
| 4277 | VmaAllocationCreateInfo bufAllocCreateInfo = {};
|
| 4278 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &bufAllocCreateInfo.pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4279 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4280 |
|
| 4281 | // Create one persistently mapped buffer to keep the memory of this block mapped,
|
| 4282 | // so that the pointer to mapped data remains (more or less...) valid even
|
| 4283 | // after the other allocations are destroyed.
|
| 4284 |
|
| 4285 | bufAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 4286 | VkBuffer firstBuf;
|
| 4287 | VmaAllocation firstAlloc;
|
| 4288 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &firstBuf, &firstAlloc, nullptr);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4289 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4290 |
|
| 4291 | // Test buffers.
|
| 4292 |
|
| 4293 | for(uint32_t i = 0; i < 2; ++i)
|
| 4294 | {
|
| 4295 | const bool persistentlyMapped = i == 0;
|
| 4296 | bufAllocCreateInfo.flags = persistentlyMapped ? VMA_ALLOCATION_CREATE_MAPPED_BIT : 0;
|
| 4297 | VkBuffer buf;
|
| 4298 | VmaAllocation alloc;
|
| 4299 | VmaAllocationInfo allocInfo;
|
| 4300 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &bufAllocCreateInfo, &buf, &alloc, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4301 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4302 |
|
| 4303 | void* pMappedData;
|
| 4304 | if(!persistentlyMapped)
|
| 4305 | {
|
| 4306 | res = vmaMapMemory(g_hAllocator, alloc, &pMappedData);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4307 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4308 | }
|
| 4309 | else
|
| 4310 | {
|
| 4311 | pMappedData = allocInfo.pMappedData;
|
| 4312 | }
|
| 4313 |
|
| 4314 | // Validate initialized content
|
| 4315 | bool valid = ValidatePattern(pMappedData, BUF_SIZE, 0xDC);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4316 | TEST(valid);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4317 |
|
| 4318 | if(!persistentlyMapped)
|
| 4319 | {
|
| 4320 | vmaUnmapMemory(g_hAllocator, alloc);
|
| 4321 | }
|
| 4322 |
|
| 4323 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 4324 |
|
| 4325 | // Validate freed content
|
| 4326 | valid = ValidatePattern(pMappedData, BUF_SIZE, 0xEF);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4327 | TEST(valid);
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 4328 | }
|
| 4329 |
|
| 4330 | vmaDestroyBuffer(g_hAllocator, firstBuf, firstAlloc);
|
| 4331 | vmaDestroyPool(g_hAllocator, bufAllocCreateInfo.pool);
|
| 4332 | }
|
| 4333 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4334 | static void TestPool_Benchmark(
|
| 4335 | PoolTestResult& outResult,
|
| 4336 | const PoolTestConfig& config)
|
| 4337 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4338 | TEST(config.ThreadCount > 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4339 |
|
| 4340 | RandomNumberGenerator mainRand{config.RandSeed};
|
| 4341 |
|
| 4342 | uint32_t allocationSizeProbabilitySum = std::accumulate(
|
| 4343 | config.AllocationSizes.begin(),
|
| 4344 | config.AllocationSizes.end(),
|
| 4345 | 0u,
|
| 4346 | [](uint32_t sum, const AllocationSize& allocSize) {
|
| 4347 | return sum + allocSize.Probability;
|
| 4348 | });
|
| 4349 |
|
| 4350 | VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 4351 | bufferInfo.size = 256; // Whatever.
|
| 4352 | bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 4353 |
|
| 4354 | VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 4355 | imageInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 4356 | imageInfo.extent.width = 256; // Whatever.
|
| 4357 | imageInfo.extent.height = 256; // Whatever.
|
| 4358 | imageInfo.extent.depth = 1;
|
| 4359 | imageInfo.mipLevels = 1;
|
| 4360 | imageInfo.arrayLayers = 1;
|
| 4361 | imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 4362 | imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL; // LINEAR if CPU memory.
|
| 4363 | imageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
|
| 4364 | imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // TRANSFER_SRC if CPU memory.
|
| 4365 | imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 4366 |
|
| 4367 | uint32_t bufferMemoryTypeBits = UINT32_MAX;
|
| 4368 | {
|
| 4369 | VkBuffer dummyBuffer;
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 4370 | VkResult res = vkCreateBuffer(g_hDevice, &bufferInfo, g_Allocs, &dummyBuffer);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4371 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4372 |
|
| 4373 | VkMemoryRequirements memReq;
|
| 4374 | vkGetBufferMemoryRequirements(g_hDevice, dummyBuffer, &memReq);
|
| 4375 | bufferMemoryTypeBits = memReq.memoryTypeBits;
|
| 4376 |
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 4377 | vkDestroyBuffer(g_hDevice, dummyBuffer, g_Allocs);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4378 | }
|
| 4379 |
|
| 4380 | uint32_t imageMemoryTypeBits = UINT32_MAX;
|
| 4381 | {
|
| 4382 | VkImage dummyImage;
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 4383 | VkResult res = vkCreateImage(g_hDevice, &imageInfo, g_Allocs, &dummyImage);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4384 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4385 |
|
| 4386 | VkMemoryRequirements memReq;
|
| 4387 | vkGetImageMemoryRequirements(g_hDevice, dummyImage, &memReq);
|
| 4388 | imageMemoryTypeBits = memReq.memoryTypeBits;
|
| 4389 |
|
Adam Sawicki | 1f84f62 | 2019-07-02 13:40:01 +0200 | [diff] [blame] | 4390 | vkDestroyImage(g_hDevice, dummyImage, g_Allocs);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4391 | }
|
| 4392 |
|
| 4393 | uint32_t memoryTypeBits = 0;
|
| 4394 | if(config.UsesBuffers() && config.UsesImages())
|
| 4395 | {
|
| 4396 | memoryTypeBits = bufferMemoryTypeBits & imageMemoryTypeBits;
|
| 4397 | if(memoryTypeBits == 0)
|
| 4398 | {
|
| 4399 | PrintWarning(L"Cannot test buffers + images in the same memory pool on this GPU.");
|
| 4400 | return;
|
| 4401 | }
|
| 4402 | }
|
| 4403 | else if(config.UsesBuffers())
|
| 4404 | memoryTypeBits = bufferMemoryTypeBits;
|
| 4405 | else if(config.UsesImages())
|
| 4406 | memoryTypeBits = imageMemoryTypeBits;
|
| 4407 | else
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4408 | TEST(0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4409 |
|
| 4410 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 4411 | poolCreateInfo.memoryTypeIndex = 0;
|
| 4412 | poolCreateInfo.minBlockCount = 1;
|
| 4413 | poolCreateInfo.maxBlockCount = 1;
|
| 4414 | poolCreateInfo.blockSize = config.PoolSize;
|
| 4415 | poolCreateInfo.frameInUseCount = 1;
|
| 4416 |
|
Adam Sawicki | 26eaa3b | 2021-02-18 15:53:18 +0100 | [diff] [blame] | 4417 | VmaPool pool = VK_NULL_HANDLE;
|
| 4418 | VkResult res;
|
| 4419 | // Loop over memory types because we sometimes allocate a big block here,
|
| 4420 | // while the most eligible DEVICE_LOCAL heap may be only 256 MB on some GPUs.
|
| 4421 | while(memoryTypeBits)
|
| 4422 | {
|
| 4423 | VmaAllocationCreateInfo dummyAllocCreateInfo = {};
|
| 4424 | dummyAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 4425 | vmaFindMemoryTypeIndex(g_hAllocator, memoryTypeBits, &dummyAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4426 |
|
Adam Sawicki | 26eaa3b | 2021-02-18 15:53:18 +0100 | [diff] [blame] | 4427 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
| 4428 | if(res == VK_SUCCESS)
|
| 4429 | break;
|
| 4430 | memoryTypeBits &= ~(1u << poolCreateInfo.memoryTypeIndex);
|
| 4431 | }
|
| 4432 | TEST(pool);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4433 |
|
| 4434 | // Start time measurement - after creating pool and initializing data structures.
|
| 4435 | time_point timeBeg = std::chrono::high_resolution_clock::now();
|
| 4436 |
|
| 4437 | ////////////////////////////////////////////////////////////////////////////////
|
| 4438 | // ThreadProc
|
| 4439 | auto ThreadProc = [&](
|
| 4440 | PoolTestThreadResult* outThreadResult,
|
| 4441 | uint32_t randSeed,
|
| 4442 | HANDLE frameStartEvent,
|
| 4443 | HANDLE frameEndEvent) -> void
|
| 4444 | {
|
| 4445 | RandomNumberGenerator threadRand{randSeed};
|
| 4446 |
|
| 4447 | outThreadResult->AllocationTimeMin = duration::max();
|
| 4448 | outThreadResult->AllocationTimeSum = duration::zero();
|
| 4449 | outThreadResult->AllocationTimeMax = duration::min();
|
| 4450 | outThreadResult->DeallocationTimeMin = duration::max();
|
| 4451 | outThreadResult->DeallocationTimeSum = duration::zero();
|
| 4452 | outThreadResult->DeallocationTimeMax = duration::min();
|
| 4453 | outThreadResult->AllocationCount = 0;
|
| 4454 | outThreadResult->DeallocationCount = 0;
|
| 4455 | outThreadResult->LostAllocationCount = 0;
|
| 4456 | outThreadResult->LostAllocationTotalSize = 0;
|
| 4457 | outThreadResult->FailedAllocationCount = 0;
|
| 4458 | outThreadResult->FailedAllocationTotalSize = 0;
|
| 4459 |
|
| 4460 | struct Item
|
| 4461 | {
|
| 4462 | VkDeviceSize BufferSize;
|
| 4463 | VkExtent2D ImageSize;
|
| 4464 | VkBuffer Buf;
|
| 4465 | VkImage Image;
|
| 4466 | VmaAllocation Alloc;
|
| 4467 |
|
| 4468 | VkDeviceSize CalcSizeBytes() const
|
| 4469 | {
|
| 4470 | return BufferSize +
|
| 4471 | ImageSize.width * ImageSize.height * 4;
|
| 4472 | }
|
| 4473 | };
|
| 4474 | std::vector<Item> unusedItems, usedItems;
|
| 4475 |
|
| 4476 | const size_t threadTotalItemCount = config.TotalItemCount / config.ThreadCount;
|
| 4477 |
|
| 4478 | // Create all items - all unused, not yet allocated.
|
| 4479 | for(size_t i = 0; i < threadTotalItemCount; ++i)
|
| 4480 | {
|
| 4481 | Item item = {};
|
| 4482 |
|
| 4483 | uint32_t allocSizeIndex = 0;
|
| 4484 | uint32_t r = threadRand.Generate() % allocationSizeProbabilitySum;
|
| 4485 | while(r >= config.AllocationSizes[allocSizeIndex].Probability)
|
| 4486 | r -= config.AllocationSizes[allocSizeIndex++].Probability;
|
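| | // Weighted random choice: each entry of config.AllocationSizes is selected with
| | // probability proportional to its Probability field.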
| 4487 |
|
| 4488 | const AllocationSize& allocSize = config.AllocationSizes[allocSizeIndex];
|
| 4489 | if(allocSize.BufferSizeMax > 0)
|
| 4490 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4491 | TEST(allocSize.BufferSizeMin > 0);
|
| 4492 | TEST(allocSize.ImageSizeMin == 0 && allocSize.ImageSizeMax == 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4493 | if(allocSize.BufferSizeMax == allocSize.BufferSizeMin)
|
| 4494 | item.BufferSize = allocSize.BufferSizeMin;
|
| 4495 | else
|
| 4496 | {
|
| 4497 | item.BufferSize = allocSize.BufferSizeMin + threadRand.Generate() % (allocSize.BufferSizeMax - allocSize.BufferSizeMin);
|
| 4498 | item.BufferSize = item.BufferSize / 16 * 16;
|
| 4499 | }
|
| 4500 | }
|
| 4501 | else
|
| 4502 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4503 | TEST(allocSize.ImageSizeMin > 0 && allocSize.ImageSizeMax > 0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4504 | if(allocSize.ImageSizeMax == allocSize.ImageSizeMin)
|
| 4505 | item.ImageSize.width = item.ImageSize.height = allocSize.ImageSizeMax;
|
| 4506 | else
|
| 4507 | {
|
| 4508 | item.ImageSize.width = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
|
| 4509 | item.ImageSize.height = allocSize.ImageSizeMin + threadRand.Generate() % (allocSize.ImageSizeMax - allocSize.ImageSizeMin);
|
| 4510 | }
|
| 4511 | }
|
| 4512 |
|
| 4513 | unusedItems.push_back(item);
|
| 4514 | }
|
| 4515 |
|
| 4516 | auto Allocate = [&](Item& item) -> VkResult
|
| 4517 | {
|
| 4518 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4519 | allocCreateInfo.pool = pool;
|
| 4520 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT |
|
| 4521 | VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
|
| 4522 |
|
| 4523 | if(item.BufferSize)
|
| 4524 | {
|
| 4525 | bufferInfo.size = item.BufferSize;
|
| 4526 | PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
|
| 4527 | return vmaCreateBuffer(g_hAllocator, &bufferInfo, &allocCreateInfo, &item.Buf, &item.Alloc, nullptr);
|
| 4528 | }
|
| 4529 | else
|
| 4530 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4531 | TEST(item.ImageSize.width && item.ImageSize.height);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4532 |
|
| 4533 | imageInfo.extent.width = item.ImageSize.width;
|
| 4534 | imageInfo.extent.height = item.ImageSize.height;
|
| 4535 | PoolAllocationTimeRegisterObj timeRegisterObj(*outThreadResult);
|
| 4536 | return vmaCreateImage(g_hAllocator, &imageInfo, &allocCreateInfo, &item.Image, &item.Alloc, nullptr);
|
| 4537 | }
|
| 4538 | };
|
| 4539 |
|
| 4540 | ////////////////////////////////////////////////////////////////////////////////
|
| 4541 | // Frames
|
| 4542 | for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
|
| 4543 | {
|
| 4544 | WaitForSingleObject(frameStartEvent, INFINITE);
|
| 4545 |
|
| 4546 | // Always move some percentage of used buffers to unused, so that a different set is used each frame.
|
| 4547 | const size_t bufsToMakeUnused = usedItems.size() * config.ItemsToMakeUnusedPercent / 100;
|
| 4548 | for(size_t i = 0; i < bufsToMakeUnused; ++i)
|
| 4549 | {
|
| 4550 | size_t index = threadRand.Generate() % usedItems.size();
|
| 4551 | unusedItems.push_back(usedItems[index]);
|
| 4552 | usedItems.erase(usedItems.begin() + index);
|
| 4553 | }
|
| 4554 |
|
| 4555 | // Determine which bufs we want to use in this frame.
|
| 4556 | const size_t usedBufCount = (threadRand.Generate() % (config.UsedItemCountMax - config.UsedItemCountMin) + config.UsedItemCountMin)
|
| 4557 | / config.ThreadCount;
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4558 | TEST(usedBufCount < usedItems.size() + unusedItems.size());
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4559 | // Move some used to unused.
|
| 4560 | while(usedBufCount < usedItems.size())
|
| 4561 | {
|
| 4562 | size_t index = threadRand.Generate() % usedItems.size();
|
| 4563 | unusedItems.push_back(usedItems[index]);
|
| 4564 | usedItems.erase(usedItems.begin() + index);
|
| 4565 | }
|
| 4566 | // Move some unused to used.
|
| 4567 | while(usedBufCount > usedItems.size())
|
| 4568 | {
|
| 4569 | size_t index = threadRand.Generate() % unusedItems.size();
|
| 4570 | usedItems.push_back(unusedItems[index]);
|
| 4571 | unusedItems.erase(unusedItems.begin() + index);
|
| 4572 | }
|
| 4573 |
|
| 4574 | uint32_t touchExistingCount = 0;
|
| 4575 | uint32_t touchLostCount = 0;
|
| 4576 | uint32_t createSucceededCount = 0;
|
| 4577 | uint32_t createFailedCount = 0;
|
| 4578 |
|
| 4579 | // Touch all used buffers. If a buffer is not yet created, or was lost, (re)allocate it.
|
| 4580 | for(size_t i = 0; i < usedItems.size(); ++i)
|
| 4581 | {
|
| 4582 | Item& item = usedItems[i];
|
| 4583 | // Not yet created.
|
| 4584 | if(item.Alloc == VK_NULL_HANDLE)
|
| 4585 | {
|
| 4586 | res = Allocate(item);
|
| 4587 | ++outThreadResult->AllocationCount;
|
| 4588 | if(res != VK_SUCCESS)
|
| 4589 | {
|
| 4590 | item.Alloc = VK_NULL_HANDLE;
|
| 4591 | item.Buf = VK_NULL_HANDLE;
|
| 4592 | ++outThreadResult->FailedAllocationCount;
|
| 4593 | outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
|
| 4594 | ++createFailedCount;
|
| 4595 | }
|
| 4596 | else
|
| 4597 | ++createSucceededCount;
|
| 4598 | }
|
| 4599 | else
|
| 4600 | {
|
| 4601 | // Touch.
|
| 4602 | VmaAllocationInfo allocInfo;
|
| 4603 | vmaGetAllocationInfo(g_hAllocator, item.Alloc, &allocInfo);
|
| 4604 | // Lost.
|
| 4605 | if(allocInfo.deviceMemory == VK_NULL_HANDLE)
|
| 4606 | {
|
| 4607 | ++touchLostCount;
|
| 4608 |
|
| 4609 | // Destroy.
|
| 4610 | {
|
| 4611 | PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
|
| 4612 | if(item.Buf)
|
| 4613 | vmaDestroyBuffer(g_hAllocator, item.Buf, item.Alloc);
|
| 4614 | else
|
| 4615 | vmaDestroyImage(g_hAllocator, item.Image, item.Alloc);
|
| 4616 | ++outThreadResult->DeallocationCount;
|
| 4617 | }
|
| 4618 | item.Alloc = VK_NULL_HANDLE;
|
| 4619 | item.Buf = VK_NULL_HANDLE;
|
| 4620 |
|
| 4621 | ++outThreadResult->LostAllocationCount;
|
| 4622 | outThreadResult->LostAllocationTotalSize += item.CalcSizeBytes();
|
| 4623 |
|
| 4624 | // Recreate.
|
| 4625 | res = Allocate(item);
|
| 4626 | ++outThreadResult->AllocationCount;
|
| 4627 | // Creation failed.
|
| 4628 | if(res != VK_SUCCESS)
|
| 4629 | {
|
| 4630 | ++outThreadResult->FailedAllocationCount;
|
| 4631 | outThreadResult->FailedAllocationTotalSize += item.CalcSizeBytes();
|
| 4632 | ++createFailedCount;
|
| 4633 | }
|
| 4634 | else
|
| 4635 | ++createSucceededCount;
|
| 4636 | }
|
| 4637 | else
|
| 4638 | ++touchExistingCount;
|
| 4639 | }
|
| 4640 | }
|
| 4641 |
|
| 4642 | /*
|
| 4643 | printf("Thread %u frame %u: Touch existing %u lost %u, create succeeded %u failed %u\n",
|
| 4644 | randSeed, frameIndex,
|
| 4645 | touchExistingCount, touchLostCount,
|
| 4646 | createSucceededCount, createFailedCount);
|
| 4647 | */
|
| 4648 |
|
| 4649 | SetEvent(frameEndEvent);
|
| 4650 | }
|
| 4651 |
|
| 4652 | // Free all remaining items.
|
| 4653 | for(size_t i = usedItems.size(); i--; )
|
| 4654 | {
|
| 4655 | PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
|
| 4656 | if(usedItems[i].Buf)
|
| 4657 | vmaDestroyBuffer(g_hAllocator, usedItems[i].Buf, usedItems[i].Alloc);
|
| 4658 | else
|
| 4659 | vmaDestroyImage(g_hAllocator, usedItems[i].Image, usedItems[i].Alloc);
|
| 4660 | ++outThreadResult->DeallocationCount;
|
| 4661 | }
|
| 4662 | for(size_t i = unusedItems.size(); i--; )
|
| 4663 | {
|
| 4664 | PoolDeallocationTimeRegisterObj timeRegisterObj(*outThreadResult);
|
| 4665 | if(unusedItems[i].Buf)
|
| 4666 | vmaDestroyBuffer(g_hAllocator, unusedItems[i].Buf, unusedItems[i].Alloc);
|
| 4667 | else
|
| 4668 | vmaDestroyImage(g_hAllocator, unusedItems[i].Image, unusedItems[i].Alloc);
|
| 4669 | ++outThreadResult->DeallocationCount;
|
| 4670 | }
|
| 4671 | };
|
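// Note on the worker lambda above: each frame the thread rebalances its items so that roughly
// (UsedItemCountMin..UsedItemCountMax) / ThreadCount of them are "used", then touches every used
// item with vmaGetAllocationInfo(). With lost-allocation support, allocInfo.deviceMemory ==
// VK_NULL_HANDLE means the allocation has been lost, so the item is destroyed and recreated;
// otherwise the touch simply marks the allocation as used in the current frame.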
| 4672 |
|
| 4673 | // Launch threads.
|
| 4674 | uint32_t threadRandSeed = mainRand.Generate();
|
| 4675 | std::vector<HANDLE> frameStartEvents{config.ThreadCount};
|
| 4676 | std::vector<HANDLE> frameEndEvents{config.ThreadCount};
|
| 4677 | std::vector<std::thread> bkgThreads;
|
| 4678 | std::vector<PoolTestThreadResult> threadResults{config.ThreadCount};
|
| 4679 | for(uint32_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
|
| 4680 | {
|
| 4681 | frameStartEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
|
| 4682 | frameEndEvents[threadIndex] = CreateEvent(NULL, FALSE, FALSE, NULL);
|
| 4683 | bkgThreads.emplace_back(std::bind(
|
| 4684 | ThreadProc,
|
| 4685 | &threadResults[threadIndex],
|
| 4686 | threadRandSeed + threadIndex,
|
| 4687 | frameStartEvents[threadIndex],
|
| 4688 | frameEndEvents[threadIndex]));
|
| 4689 | }
|
| 4690 |
|
| 4691 | // Execute frames.
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 4692 | TEST(config.ThreadCount <= MAXIMUM_WAIT_OBJECTS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 4693 | for(uint32_t frameIndex = 0; frameIndex < config.FrameCount; ++frameIndex)
|
| 4694 | {
|
| 4695 | vmaSetCurrentFrameIndex(g_hAllocator, frameIndex);
|
| 4696 | for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
|
| 4697 | SetEvent(frameStartEvents[threadIndex]);
|
| 4698 | WaitForMultipleObjects(config.ThreadCount, &frameEndEvents[0], TRUE, INFINITE);
|
| 4699 | }
|
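// Frame synchronization sketch: the events are created above with CreateEvent(NULL, FALSE, FALSE, NULL),
// i.e. auto-reset and initially non-signaled. The main thread signals every frameStartEvent, each
// worker processes one frame and signals its own frameEndEvent, and WaitForMultipleObjects() with
// bWaitAll = TRUE blocks until all workers have finished that frame. This is why config.ThreadCount
// must not exceed MAXIMUM_WAIT_OBJECTS, as asserted before the loop.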
| 4700 |
|
| 4701 | // Wait for all threads to finish and close their event handles.
|
| 4702 | for(size_t i = 0; i < bkgThreads.size(); ++i)
|
| 4703 | {
|
| 4704 | bkgThreads[i].join();
|
| 4705 | CloseHandle(frameEndEvents[i]);
|
| 4706 | CloseHandle(frameStartEvents[i]);
|
| 4707 | }
|
| 4708 | bkgThreads.clear();
|
| 4709 |
|
| 4710 | // Finish time measurement - before destroying pool.
|
| 4711 | outResult.TotalTime = std::chrono::high_resolution_clock::now() - timeBeg;
|
| 4712 |
|
| 4713 | vmaDestroyPool(g_hAllocator, pool);
|
| 4714 |
|
| 4715 | outResult.AllocationTimeMin = duration::max();
|
| 4716 | outResult.AllocationTimeAvg = duration::zero();
|
| 4717 | outResult.AllocationTimeMax = duration::min();
|
| 4718 | outResult.DeallocationTimeMin = duration::max();
|
| 4719 | outResult.DeallocationTimeAvg = duration::zero();
|
| 4720 | outResult.DeallocationTimeMax = duration::min();
|
| 4721 | outResult.LostAllocationCount = 0;
|
| 4722 | outResult.LostAllocationTotalSize = 0;
|
| 4723 | outResult.FailedAllocationCount = 0;
|
| 4724 | outResult.FailedAllocationTotalSize = 0;
|
| 4725 | size_t allocationCount = 0;
|
| 4726 | size_t deallocationCount = 0;
|
| 4727 | for(size_t threadIndex = 0; threadIndex < config.ThreadCount; ++threadIndex)
|
| 4728 | {
|
| 4729 | const PoolTestThreadResult& threadResult = threadResults[threadIndex];
|
| 4730 | outResult.AllocationTimeMin = std::min(outResult.AllocationTimeMin, threadResult.AllocationTimeMin);
|
| 4731 | outResult.AllocationTimeMax = std::max(outResult.AllocationTimeMax, threadResult.AllocationTimeMax);
|
| 4732 | outResult.AllocationTimeAvg += threadResult.AllocationTimeSum;
|
| 4733 | outResult.DeallocationTimeMin = std::min(outResult.DeallocationTimeMin, threadResult.DeallocationTimeMin);
|
| 4734 | outResult.DeallocationTimeMax = std::max(outResult.DeallocationTimeMax, threadResult.DeallocationTimeMax);
|
| 4735 | outResult.DeallocationTimeAvg += threadResult.DeallocationTimeSum;
|
| 4736 | allocationCount += threadResult.AllocationCount;
|
| 4737 | deallocationCount += threadResult.DeallocationCount;
|
| 4738 | outResult.FailedAllocationCount += threadResult.FailedAllocationCount;
|
| 4739 | outResult.FailedAllocationTotalSize += threadResult.FailedAllocationTotalSize;
|
| 4740 | outResult.LostAllocationCount += threadResult.LostAllocationCount;
|
| 4741 | outResult.LostAllocationTotalSize += threadResult.LostAllocationTotalSize;
|
| 4742 | }
|
| 4743 | if(allocationCount)
|
| 4744 | outResult.AllocationTimeAvg /= allocationCount;
|
| 4745 | if(deallocationCount)
|
| 4746 | outResult.DeallocationTimeAvg /= deallocationCount;
|
| 4747 | }
|
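// The per-thread results above are reduced into outResult: minima/maxima are merged with
// std::min/std::max, while the *TimeAvg fields first accumulate the per-thread sums
// (AllocationTimeSum / DeallocationTimeSum) and are only divided by the total operation counts
// at the end, e.g. AllocationTimeAvg = sum(AllocationTimeSum) / allocationCount.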
| 4748 |
|
| 4749 | static inline bool MemoryRegionsOverlap(char* ptr1, size_t size1, char* ptr2, size_t size2)
|
| 4750 | {
|
| 4751 | if(ptr1 < ptr2)
|
| 4752 | return ptr1 + size1 > ptr2;
|
| 4753 | else if(ptr2 < ptr1)
|
| 4754 | return ptr2 + size2 > ptr1;
|
| 4755 | else
|
| 4756 | return true;
|
| 4757 | }
|
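// MemoryRegionsOverlap() treats each region as the half-open range [ptr, ptr + size).
// A hypothetical usage sketch (the buffer name is invented purely for illustration):
//
//   char buf[64];
//   assert( MemoryRegionsOverlap(buf, 32, buf + 16, 32));  // [0,32) and [16,48) overlap
//   assert(!MemoryRegionsOverlap(buf, 16, buf + 16, 16));  // [0,16) and [16,32) only touch
//
// It is used further below to verify that two separately mapped buffers never expose
// overlapping host address ranges.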
| 4758 |
|
Adam Sawicki | efa88c4 | 2019-11-18 16:33:56 +0100 | [diff] [blame] | 4759 | static void TestMemoryUsage()
|
| 4760 | {
|
| 4761 | wprintf(L"Testing memory usage:\n");
|
| 4762 |
|
Adam Sawicki | 6918555 | 2019-11-18 17:03:34 +0100 | [diff] [blame] | 4763 | static const VmaMemoryUsage lastUsage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
|
Adam Sawicki | efa88c4 | 2019-11-18 16:33:56 +0100 | [diff] [blame] | 4764 | for(uint32_t usage = 0; usage <= lastUsage; ++usage)
|
| 4765 | {
|
| 4766 | switch(usage)
|
| 4767 | {
|
| 4768 | case VMA_MEMORY_USAGE_UNKNOWN: printf(" VMA_MEMORY_USAGE_UNKNOWN:\n"); break;
|
| 4769 | case VMA_MEMORY_USAGE_GPU_ONLY: printf(" VMA_MEMORY_USAGE_GPU_ONLY:\n"); break;
|
| 4770 | case VMA_MEMORY_USAGE_CPU_ONLY: printf(" VMA_MEMORY_USAGE_CPU_ONLY:\n"); break;
|
| 4771 | case VMA_MEMORY_USAGE_CPU_TO_GPU: printf(" VMA_MEMORY_USAGE_CPU_TO_GPU:\n"); break;
|
| 4772 | case VMA_MEMORY_USAGE_GPU_TO_CPU: printf(" VMA_MEMORY_USAGE_GPU_TO_CPU:\n"); break;
|
| 4773 | case VMA_MEMORY_USAGE_CPU_COPY: printf(" VMA_MEMORY_USAGE_CPU_COPY:\n"); break;
|
Adam Sawicki | 6918555 | 2019-11-18 17:03:34 +0100 | [diff] [blame] | 4774 | case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: printf(" VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:\n"); break;
|
Adam Sawicki | efa88c4 | 2019-11-18 16:33:56 +0100 | [diff] [blame] | 4775 | default: assert(0);
|
| 4776 | }
|
| 4777 |
|
| 4778 | auto printResult = [](const char* testName, VkResult res, uint32_t memoryTypeBits, uint32_t memoryTypeIndex)
|
| 4779 | {
|
| 4780 | if(res == VK_SUCCESS)
|
| 4781 | printf(" %s: memoryTypeBits=0x%X, memoryTypeIndex=%u\n", testName, memoryTypeBits, memoryTypeIndex);
|
| 4782 | else
|
| 4783 | printf(" %s: memoryTypeBits=0x%X, FAILED with res=%d\n", testName, memoryTypeBits, (int32_t)res);
|
| 4784 | };
|
| 4785 |
|
| 4786 | // 1: Buffer for copy
|
| 4787 | {
|
| 4788 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 4789 | bufCreateInfo.size = 65536;
|
| 4790 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 4791 |
|
| 4792 | VkBuffer buf = VK_NULL_HANDLE;
|
| 4793 | VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
|
| 4794 | TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
|
| 4795 |
|
| 4796 | VkMemoryRequirements memReq = {};
|
| 4797 | vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
|
| 4798 |
|
| 4799 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4800 | allocCreateInfo.usage = (VmaMemoryUsage)usage;
|
| 4801 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 4802 | VmaAllocationInfo allocInfo = {};
|
| 4803 | res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
|
| 4804 | if(res == VK_SUCCESS)
|
| 4805 | {
|
| 4806 | TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
|
| 4807 | res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
|
| 4808 | TEST(res == VK_SUCCESS);
|
| 4809 | }
|
| 4810 | printResult("Buffer TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
|
| 4811 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 4812 | }
|
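// Each numbered case in this test follows the same pattern: create the resource with
// vkCreateBuffer()/vkCreateImage(), query its VkMemoryRequirements, let VMA choose a memory type
// for the requested VmaMemoryUsage via vmaAllocateMemoryForBuffer()/vmaAllocateMemoryForImage(),
// verify the returned memoryType is allowed by memoryTypeBits, bind, and print the result.
// Failure for some usage values is acceptable here; the test only reports which type was chosen.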
| 4813 |
|
| 4814 | // 2: Vertex buffer
|
| 4815 | {
|
| 4816 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 4817 | bufCreateInfo.size = 65536;
|
| 4818 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 4819 |
|
| 4820 | VkBuffer buf = VK_NULL_HANDLE;
|
| 4821 | VkResult res = vkCreateBuffer(g_hDevice, &bufCreateInfo, g_Allocs, &buf);
|
| 4822 | TEST(res == VK_SUCCESS && buf != VK_NULL_HANDLE);
|
| 4823 |
|
| 4824 | VkMemoryRequirements memReq = {};
|
| 4825 | vkGetBufferMemoryRequirements(g_hDevice, buf, &memReq);
|
| 4826 |
|
| 4827 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4828 | allocCreateInfo.usage = (VmaMemoryUsage)usage;
|
| 4829 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 4830 | VmaAllocationInfo allocInfo = {};
|
| 4831 | res = vmaAllocateMemoryForBuffer(g_hAllocator, buf, &allocCreateInfo, &alloc, &allocInfo);
|
| 4832 | if(res == VK_SUCCESS)
|
| 4833 | {
|
| 4834 | TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
|
| 4835 | res = vkBindBufferMemory(g_hDevice, buf, allocInfo.deviceMemory, allocInfo.offset);
|
| 4836 | TEST(res == VK_SUCCESS);
|
| 4837 | }
|
| 4838 | printResult("Buffer TRANSFER_DST + VERTEX_BUFFER", res, memReq.memoryTypeBits, allocInfo.memoryType);
|
| 4839 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 4840 | }
|
| 4841 |
|
| 4842 | // 3: Image for copy, OPTIMAL
|
| 4843 | {
|
| 4844 | VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 4845 | imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 4846 | imgCreateInfo.extent.width = 256;
|
| 4847 | imgCreateInfo.extent.height = 256;
|
| 4848 | imgCreateInfo.extent.depth = 1;
|
| 4849 | imgCreateInfo.mipLevels = 1;
|
| 4850 | imgCreateInfo.arrayLayers = 1;
|
| 4851 | imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 4852 | imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 4853 | imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 4854 | imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
|
| 4855 | imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 4856 |
|
| 4857 | VkImage img = VK_NULL_HANDLE;
|
| 4858 | VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
|
| 4859 | TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
|
| 4860 |
|
| 4861 | VkMemoryRequirements memReq = {};
|
| 4862 | vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
|
| 4863 |
|
| 4864 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4865 | allocCreateInfo.usage = (VmaMemoryUsage)usage;
|
| 4866 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 4867 | VmaAllocationInfo allocInfo = {};
|
| 4868 | res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
|
| 4869 | if(res == VK_SUCCESS)
|
| 4870 | {
|
| 4871 | TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
|
| 4872 | res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
|
| 4873 | TEST(res == VK_SUCCESS);
|
| 4874 | }
|
| 4875 | printResult("Image OPTIMAL TRANSFER_DST + TRANSFER_SRC", res, memReq.memoryTypeBits, allocInfo.memoryType);
|
| 4876 |
|
| 4877 | vmaDestroyImage(g_hAllocator, img, alloc);
|
| 4878 | }
|
| 4879 |
|
| 4880 | // 4: Image SAMPLED, OPTIMAL
|
| 4881 | {
|
| 4882 | VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 4883 | imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 4884 | imgCreateInfo.extent.width = 256;
|
| 4885 | imgCreateInfo.extent.height = 256;
|
| 4886 | imgCreateInfo.extent.depth = 1;
|
| 4887 | imgCreateInfo.mipLevels = 1;
|
| 4888 | imgCreateInfo.arrayLayers = 1;
|
| 4889 | imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 4890 | imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 4891 | imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 4892 | imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
|
| 4893 | imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 4894 |
|
| 4895 | VkImage img = VK_NULL_HANDLE;
|
| 4896 | VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
|
| 4897 | TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
|
| 4898 |
|
| 4899 | VkMemoryRequirements memReq = {};
|
| 4900 | vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
|
| 4901 |
|
| 4902 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4903 | allocCreateInfo.usage = (VmaMemoryUsage)usage;
|
| 4904 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 4905 | VmaAllocationInfo allocInfo = {};
|
| 4906 | res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
|
| 4907 | if(res == VK_SUCCESS)
|
| 4908 | {
|
| 4909 | TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
|
| 4910 | res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
|
| 4911 | TEST(res == VK_SUCCESS);
|
| 4912 | }
|
| 4913 | printResult("Image OPTIMAL TRANSFER_DST + SAMPLED", res, memReq.memoryTypeBits, allocInfo.memoryType);
|
| 4914 | vmaDestroyImage(g_hAllocator, img, alloc);
|
| 4915 | }
|
| 4916 |
|
| 4917 | // 5: Image COLOR_ATTACHMENT, OPTIMAL
|
| 4918 | {
|
| 4919 | VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 4920 | imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 4921 | imgCreateInfo.extent.width = 256;
|
| 4922 | imgCreateInfo.extent.height = 256;
|
| 4923 | imgCreateInfo.extent.depth = 1;
|
| 4924 | imgCreateInfo.mipLevels = 1;
|
| 4925 | imgCreateInfo.arrayLayers = 1;
|
| 4926 | imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 4927 | imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 4928 | imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 4929 | imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
|
| 4930 | imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 4931 |
|
| 4932 | VkImage img = VK_NULL_HANDLE;
|
| 4933 | VkResult res = vkCreateImage(g_hDevice, &imgCreateInfo, g_Allocs, &img);
|
| 4934 | TEST(res == VK_SUCCESS && img != VK_NULL_HANDLE);
|
| 4935 |
|
| 4936 | VkMemoryRequirements memReq = {};
|
| 4937 | vkGetImageMemoryRequirements(g_hDevice, img, &memReq);
|
| 4938 |
|
| 4939 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4940 | allocCreateInfo.usage = (VmaMemoryUsage)usage;
|
| 4941 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 4942 | VmaAllocationInfo allocInfo = {};
|
| 4943 | res = vmaAllocateMemoryForImage(g_hAllocator, img, &allocCreateInfo, &alloc, &allocInfo);
|
| 4944 | if(res == VK_SUCCESS)
|
| 4945 | {
|
| 4946 | TEST((memReq.memoryTypeBits & (1u << allocInfo.memoryType)) != 0);
|
| 4947 | res = vkBindImageMemory(g_hDevice, img, allocInfo.deviceMemory, allocInfo.offset);
|
| 4948 | TEST(res == VK_SUCCESS);
|
| 4949 | }
|
| 4950 | printResult("Image OPTIMAL SAMPLED + COLOR_ATTACHMENT", res, memReq.memoryTypeBits, allocInfo.memoryType);
|
| 4951 | vmaDestroyImage(g_hAllocator, img, alloc);
|
| 4952 | }
|
| 4953 | }
|
| 4954 | }
|
| 4955 |
|
Adam Sawicki | 5088250 | 2020-02-07 16:51:31 +0100 | [diff] [blame] | 4956 | static uint32_t FindDeviceCoherentMemoryTypeBits()
|
| 4957 | {
|
| 4958 | VkPhysicalDeviceMemoryProperties memProps;
|
| 4959 | vkGetPhysicalDeviceMemoryProperties(g_hPhysicalDevice, &memProps);
|
| 4960 |
|
| 4961 | uint32_t memTypeBits = 0;
|
| 4962 | for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
|
| 4963 | {
|
| 4964 | if(memProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD)
|
| 4965 | memTypeBits |= 1u << i;
|
| 4966 | }
|
| 4967 | return memTypeBits;
|
| 4968 | }
|
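// The returned mask follows the usual Vulkan convention: bit i set means memory type i has
// VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD. A minimal sketch of testing one type index against
// such a mask, mirroring the checks used below:
//
//   const bool typeIsDeviceCoherent = (memTypeBits & (1u << allocInfo.memoryType)) != 0;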
| 4969 |
|
| 4970 | static void TestDeviceCoherentMemory()
|
| 4971 | {
|
| 4972 | if(!VK_AMD_device_coherent_memory_enabled)
|
| 4973 | return;
|
| 4974 |
|
| 4975 | uint32_t deviceCoherentMemoryTypeBits = FindDeviceCoherentMemoryTypeBits();
|
| 4976 | // Extension is enabled, feature is enabled, and the device still doesn't support any such memory type?
|
| 4977 | // OK then, so it's just fake!
|
| 4978 | if(deviceCoherentMemoryTypeBits == 0)
|
| 4979 | return;
|
| 4980 |
|
| 4981 | wprintf(L"Testing device coherent memory...\n");
|
| 4982 |
|
| 4983 | // 1. Try to allocate buffer from a memory type that is DEVICE_COHERENT.
|
| 4984 |
|
| 4985 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 4986 | bufCreateInfo.size = 0x10000;
|
| 4987 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 4988 |
|
| 4989 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 4990 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 4991 | allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD;
|
| 4992 |
|
| 4993 | AllocInfo alloc = {};
|
| 4994 | VmaAllocationInfo allocInfo = {};
|
| 4995 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
|
| 4996 |
|
| 4997 | // Make sure it succeeded and was really created in such memory type.
|
| 4998 | TEST(res == VK_SUCCESS);
|
| 4999 | TEST((1u << allocInfo.memoryType) & deviceCoherentMemoryTypeBits);
|
| 5000 |
|
| 5001 | alloc.Destroy();
|
| 5002 |
|
| 5003 | // 2. Try to create a pool in such memory type.
|
| 5004 | {
|
| 5005 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 5006 |
|
| 5007 | res = vmaFindMemoryTypeIndex(g_hAllocator, UINT32_MAX, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
| 5008 | TEST(res == VK_SUCCESS);
|
| 5009 | TEST((1u << poolCreateInfo.memoryTypeIndex) & deviceCoherentMemoryTypeBits);
|
| 5010 |
|
| 5011 | VmaPool pool = VK_NULL_HANDLE;
|
| 5012 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
| 5013 | TEST(res == VK_SUCCESS);
|
| 5014 |
|
| 5015 | vmaDestroyPool(g_hAllocator, pool);
|
| 5016 | }
|
| 5017 |
|
| 5018 | // 3. Try the same with a local allocator created without VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT.
|
| 5019 |
|
| 5020 | VmaAllocatorCreateInfo allocatorCreateInfo = {};
|
| 5021 | SetAllocatorCreateInfo(allocatorCreateInfo);
|
| 5022 | allocatorCreateInfo.flags &= ~VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT;
|
| 5023 |
|
| 5024 | VmaAllocator localAllocator = VK_NULL_HANDLE;
|
| 5025 | res = vmaCreateAllocator(&allocatorCreateInfo, &localAllocator);
|
| 5026 | TEST(res == VK_SUCCESS && localAllocator);
|
| 5027 |
|
| 5028 | res = vmaCreateBuffer(localAllocator, &bufCreateInfo, &allocCreateInfo, &alloc.m_Buffer, &alloc.m_Allocation, &allocInfo);
|
| 5029 |
|
| 5030 | // Make sure it failed.
|
| 5031 | TEST(res != VK_SUCCESS && !alloc.m_Buffer && !alloc.m_Allocation);
|
| 5032 |
|
| 5033 | // 4. Try to find memory type.
|
| 5034 | {
|
| 5035 | uint32_t memTypeIndex = UINT32_MAX;
|
| 5036 | res = vmaFindMemoryTypeIndex(localAllocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
|
| 5037 | TEST(res != VK_SUCCESS);
|
| 5038 | }
|
| 5039 |
|
| 5040 | vmaDestroyAllocator(localAllocator);
|
| 5041 | }
|
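// Summary of the negative part of this test: a second VmaAllocator is created with
// VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT cleared, so requesting
// VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD through requiredFlags is expected to fail both in
// vmaCreateBuffer() and in vmaFindMemoryTypeIndex() - the allocator must not hand out memory
// types whose use was not enabled at allocator creation time.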
| 5042 |
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5043 | static void TestBudget()
|
| 5044 | {
|
| 5045 | wprintf(L"Testing budget...\n");
|
| 5046 |
|
Adam Sawicki | 6a93b8a | 2020-03-09 16:58:18 +0100 | [diff] [blame] | 5047 | static const VkDeviceSize BUF_SIZE = 10ull * 1024 * 1024;
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5048 | static const uint32_t BUF_COUNT = 4;
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5049 |
|
Adam Sawicki | 6a93b8a | 2020-03-09 16:58:18 +0100 | [diff] [blame] | 5050 | const VkPhysicalDeviceMemoryProperties* memProps = {};
|
| 5051 | vmaGetMemoryProperties(g_hAllocator, &memProps);
|
| 5052 |
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5053 | for(uint32_t testIndex = 0; testIndex < 2; ++testIndex)
|
| 5054 | {
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5055 | vmaSetCurrentFrameIndex(g_hAllocator, ++g_FrameIndex);
|
| 5056 |
|
| 5057 | VmaBudget budgetBeg[VK_MAX_MEMORY_HEAPS] = {};
|
| 5058 | vmaGetBudget(g_hAllocator, budgetBeg);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5059 |
|
Adam Sawicki | 6a93b8a | 2020-03-09 16:58:18 +0100 | [diff] [blame] | 5060 | for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 5061 | {
|
Adam Sawicki | 6a93b8a | 2020-03-09 16:58:18 +0100 | [diff] [blame] | 5062 | TEST(budgetBeg[i].budget > 0);
|
| 5063 | TEST(budgetBeg[i].budget <= memProps->memoryHeaps[i].size);
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 5064 | TEST(budgetBeg[i].allocationBytes <= budgetBeg[i].blockBytes);
|
| 5065 | }
|
| 5066 |
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5067 | VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 5068 | bufInfo.size = BUF_SIZE;
|
| 5069 | bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 5070 |
|
| 5071 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 5072 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 5073 | if(testIndex == 0)
|
| 5074 | {
|
| 5075 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 5076 | }
|
| 5077 |
|
| 5078 | // CREATE BUFFERS
|
| 5079 | uint32_t heapIndex = 0;
|
| 5080 | BufferInfo bufInfos[BUF_COUNT] = {};
|
| 5081 | for(uint32_t bufIndex = 0; bufIndex < BUF_COUNT; ++bufIndex)
|
| 5082 | {
|
| 5083 | VmaAllocationInfo allocInfo;
|
| 5084 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
|
| 5085 | &bufInfos[bufIndex].Buffer, &bufInfos[bufIndex].Allocation, &allocInfo);
|
| 5086 | TEST(res == VK_SUCCESS);
|
| 5087 | if(bufIndex == 0)
|
| 5088 | {
|
| 5089 | heapIndex = MemoryTypeToHeap(allocInfo.memoryType);
|
| 5090 | }
|
| 5091 | else
|
| 5092 | {
|
| 5093 | // All buffers need to fall into the same heap.
|
| 5094 | TEST(MemoryTypeToHeap(allocInfo.memoryType) == heapIndex);
|
| 5095 | }
|
| 5096 | }
|
| 5097 |
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5098 | VmaBudget budgetWithBufs[VK_MAX_MEMORY_HEAPS] = {};
|
| 5099 | vmaGetBudget(g_hAllocator, budgetWithBufs);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5100 |
|
| 5101 | // DESTROY BUFFERS
|
| 5102 | for(size_t bufIndex = BUF_COUNT; bufIndex--; )
|
| 5103 | {
|
| 5104 | vmaDestroyBuffer(g_hAllocator, bufInfos[bufIndex].Buffer, bufInfos[bufIndex].Allocation);
|
| 5105 | }
|
| 5106 |
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5107 | VmaBudget budgetEnd[VK_MAX_MEMORY_HEAPS] = {};
|
| 5108 | vmaGetBudget(g_hAllocator, budgetEnd);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5109 |
|
| 5110 | // CHECK
|
Adam Sawicki | 6a93b8a | 2020-03-09 16:58:18 +0100 | [diff] [blame] | 5111 | for(uint32_t i = 0; i < memProps->memoryHeapCount; ++i)
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5112 | {
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5113 | TEST(budgetEnd[i].allocationBytes <= budgetEnd[i].blockBytes);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5114 | if(i == heapIndex)
|
| 5115 | {
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5116 | TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes);
|
| 5117 | TEST(budgetWithBufs[i].allocationBytes == budgetBeg[i].allocationBytes + BUF_SIZE * BUF_COUNT);
|
| 5118 | TEST(budgetWithBufs[i].blockBytes >= budgetEnd[i].blockBytes);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5119 | }
|
| 5120 | else
|
| 5121 | {
|
Adam Sawicki | 353e367 | 2019-11-02 14:12:05 +0100 | [diff] [blame] | 5122 | TEST(budgetEnd[i].allocationBytes == budgetBeg[i].allocationBytes &&
|
| 5123 | budgetEnd[i].allocationBytes == budgetWithBufs[i].allocationBytes);
|
| 5124 | TEST(budgetEnd[i].blockBytes == budgetBeg[i].blockBytes &&
|
| 5125 | budgetEnd[i].blockBytes == budgetWithBufs[i].blockBytes);
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5126 | }
|
| 5127 | }
|
| 5128 | }
|
| 5129 | }
|
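// Budget invariants exercised above, per heap i:
//   budget[i].allocationBytes <= budget[i].blockBytes      (allocations live inside blocks)
//   budget[i].budget          <= memoryHeaps[i].size       (budget never exceeds the heap size)
// and for the heap that received the buffers:
//   budgetWithBufs[heapIndex].allocationBytes == budgetBeg[heapIndex].allocationBytes + BUF_SIZE * BUF_COUNT
// while all other heaps are expected to remain unchanged across the three snapshots.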
| 5130 |
|
Adam Sawicki | 0620c8e | 2020-08-18 16:43:44 +0200 | [diff] [blame] | 5131 | static void TestAliasing()
|
| 5132 | {
|
| 5133 | wprintf(L"Testing aliasing...\n");
|
| 5134 |
|
| 5135 | /*
|
| 5136 | This is just a simple test, more like a code sample to demonstrate it's possible.
|
| 5137 | */
|
| 5138 |
|
| 5139 | // A 512x512 texture to be sampled.
|
| 5140 | VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 5141 | img1CreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 5142 | img1CreateInfo.extent.width = 512;
|
| 5143 | img1CreateInfo.extent.height = 512;
|
| 5144 | img1CreateInfo.extent.depth = 1;
|
| 5145 | img1CreateInfo.mipLevels = 10;
|
| 5146 | img1CreateInfo.arrayLayers = 1;
|
| 5147 | img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB;
|
| 5148 | img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 5149 | img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 5150 | img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
|
| 5151 | img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 5152 |
|
| 5153 | // A full screen texture to be used as color attachment.
|
| 5154 | VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
|
| 5155 | img2CreateInfo.imageType = VK_IMAGE_TYPE_2D;
|
| 5156 | img2CreateInfo.extent.width = 1920;
|
| 5157 | img2CreateInfo.extent.height = 1080;
|
| 5158 | img2CreateInfo.extent.depth = 1;
|
| 5159 | img2CreateInfo.mipLevels = 1;
|
| 5160 | img2CreateInfo.arrayLayers = 1;
|
| 5161 | img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
|
| 5162 | img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
|
| 5163 | img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
|
| 5164 | img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
|
| 5165 | img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
|
| 5166 |
|
| 5167 | VkImage img1 = VK_NULL_HANDLE;
|
| 5168 | ERR_GUARD_VULKAN(vkCreateImage(g_hDevice, &img1CreateInfo, g_Allocs, &img1));
|
| 5169 | VkImage img2 = VK_NULL_HANDLE;
|
| 5170 | ERR_GUARD_VULKAN(vkCreateImage(g_hDevice, &img2CreateInfo, g_Allocs, &img2));
|
| 5171 |
|
| 5172 | VkMemoryRequirements img1MemReq = {};
|
| 5173 | vkGetImageMemoryRequirements(g_hDevice, img1, &img1MemReq);
|
| 5174 | VkMemoryRequirements img2MemReq = {};
|
| 5175 | vkGetImageMemoryRequirements(g_hDevice, img2, &img2MemReq);
|
| 5176 |
|
| 5177 | VkMemoryRequirements finalMemReq = {};
|
| 5178 | finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size);
|
| 5179 | finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment);
|
| 5180 | finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits;
|
| 5181 | if(finalMemReq.memoryTypeBits != 0)
|
| 5182 | {
|
| 5183 | wprintf(L" size: max(%llu, %llu) = %llu\n",
|
| 5184 | img1MemReq.size, img2MemReq.size, finalMemReq.size);
|
| 5185 | wprintf(L" alignment: max(%llu, %llu) = %llu\n",
|
| 5186 | img1MemReq.alignment, img2MemReq.alignment, finalMemReq.alignment);
|
| 5187 | wprintf(L" memoryTypeBits: %u & %u = %u\n",
|
| 5188 | img1MemReq.memoryTypeBits, img2MemReq.memoryTypeBits, finalMemReq.memoryTypeBits);
|
| 5189 |
|
| 5190 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 5191 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 5192 |
|
| 5193 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 5194 | ERR_GUARD_VULKAN(vmaAllocateMemory(g_hAllocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr));
|
| 5195 |
|
| 5196 | ERR_GUARD_VULKAN(vmaBindImageMemory(g_hAllocator, alloc, img1));
|
| 5197 | ERR_GUARD_VULKAN(vmaBindImageMemory(g_hAllocator, alloc, img2));
|
| 5198 |
|
| 5199 | // You can use img1, img2 here, but not at the same time!
|
| 5200 |
|
| 5201 | vmaFreeMemory(g_hAllocator, alloc);
|
| 5202 | }
|
| 5203 | else
|
| 5204 | {
|
| 5205 | wprintf(L" Textures cannot alias!\n");
|
| 5206 | }
|
| 5207 |
|
| 5208 | vkDestroyImage(g_hDevice, img2, g_Allocs);
|
| 5209 | vkDestroyImage(g_hDevice, img1, g_Allocs);
|
| 5210 | }
|
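// When two images alias one VmaAllocation like this, only one of them holds valid contents at a
// time. In a real renderer the image being switched to should be transitioned from
// VK_IMAGE_LAYOUT_UNDEFINED again (discarding the previous contents) before it is used; this
// sample can skip that step because it never records any commands with the images.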
| 5211 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5212 | static void TestMapping()
|
| 5213 | {
|
| 5214 | wprintf(L"Testing mapping...\n");
|
| 5215 |
|
| 5216 | VkResult res;
|
| 5217 | uint32_t memTypeIndex = UINT32_MAX;
|
| 5218 |
|
| 5219 | enum TEST
|
| 5220 | {
|
| 5221 | TEST_NORMAL,
|
| 5222 | TEST_POOL,
|
| 5223 | TEST_DEDICATED,
|
| 5224 | TEST_COUNT
|
| 5225 | };
|
| 5226 | for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
|
| 5227 | {
|
| 5228 | VmaPool pool = nullptr;
|
| 5229 | if(testIndex == TEST_POOL)
|
| 5230 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5231 | TEST(memTypeIndex != UINT32_MAX);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5232 | VmaPoolCreateInfo poolInfo = {};
|
| 5233 | poolInfo.memoryTypeIndex = memTypeIndex;
|
| 5234 | res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5235 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5236 | }
|
| 5237 |
|
| 5238 | VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 5239 | bufInfo.size = 0x10000;
|
| 5240 | bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5241 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5242 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 5243 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 5244 | allocCreateInfo.pool = pool;
|
| 5245 | if(testIndex == TEST_DEDICATED)
|
| 5246 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5247 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5248 | VmaAllocationInfo allocInfo;
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5249 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5250 | // Mapped manually
|
| 5251 |
|
| 5252 | // Create 2 buffers.
|
| 5253 | BufferInfo bufferInfos[3];
|
| 5254 | for(size_t i = 0; i < 2; ++i)
|
| 5255 | {
|
| 5256 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
|
| 5257 | &bufferInfos[i].Buffer, &bufferInfos[i].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5258 | TEST(res == VK_SUCCESS);
|
| 5259 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5260 | memTypeIndex = allocInfo.memoryType;
|
| 5261 | }
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 5262 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5263 | // Map buffer 0.
|
| 5264 | char* data00 = nullptr;
|
| 5265 | res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data00);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5266 | TEST(res == VK_SUCCESS && data00 != nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5267 | data00[0xFFFF] = data00[0];
|
| 5268 |
|
| 5269 | // Map buffer 0 second time.
|
| 5270 | char* data01 = nullptr;
|
| 5271 | res = vmaMapMemory(g_hAllocator, bufferInfos[0].Allocation, (void**)&data01);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5272 | TEST(res == VK_SUCCESS && data01 == data00);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5273 |
|
| 5274 | // Map buffer 1.
|
| 5275 | char* data1 = nullptr;
|
| 5276 | res = vmaMapMemory(g_hAllocator, bufferInfos[1].Allocation, (void**)&data1);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5277 | TEST(res == VK_SUCCESS && data1 != nullptr);
|
| 5278 | TEST(!MemoryRegionsOverlap(data00, (size_t)bufInfo.size, data1, (size_t)bufInfo.size));
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5279 | data1[0xFFFF] = data1[0];
|
| 5280 |
|
| 5281 | // Unmap buffer 0 two times.
|
| 5282 | vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
|
| 5283 | vmaUnmapMemory(g_hAllocator, bufferInfos[0].Allocation);
|
| 5284 | vmaGetAllocationInfo(g_hAllocator, bufferInfos[0].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5285 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5286 |
|
| 5287 | // Unmap buffer 1.
|
| 5288 | vmaUnmapMemory(g_hAllocator, bufferInfos[1].Allocation);
|
| 5289 | vmaGetAllocationInfo(g_hAllocator, bufferInfos[1].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5290 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5291 |
|
| 5292 | // Create 3rd buffer - persistently mapped.
|
| 5293 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 5294 | res = vmaCreateBuffer(g_hAllocator, &bufInfo, &allocCreateInfo,
|
| 5295 | &bufferInfos[2].Buffer, &bufferInfos[2].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5296 | TEST(res == VK_SUCCESS && allocInfo.pMappedData != nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5297 |
|
| 5298 | // Map buffer 2.
|
| 5299 | char* data2 = nullptr;
|
| 5300 | res = vmaMapMemory(g_hAllocator, bufferInfos[2].Allocation, (void**)&data2);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5301 | TEST(res == VK_SUCCESS && data2 == allocInfo.pMappedData);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5302 | data2[0xFFFF] = data2[0];
|
| 5303 |
|
| 5304 | // Unmap buffer 2.
|
| 5305 | vmaUnmapMemory(g_hAllocator, bufferInfos[2].Allocation);
|
| 5306 | vmaGetAllocationInfo(g_hAllocator, bufferInfos[2].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5307 | TEST(allocInfo.pMappedData == data2);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5308 |
|
| 5309 | // Destroy all buffers.
|
| 5310 | for(size_t i = 3; i--; )
|
| 5311 | vmaDestroyBuffer(g_hAllocator, bufferInfos[i].Buffer, bufferInfos[i].Allocation);
|
| 5312 |
|
| 5313 | vmaDestroyPool(g_hAllocator, pool);
|
| 5314 | }
|
| 5315 | }
|
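// As demonstrated above, vmaMapMemory()/vmaUnmapMemory() are reference-counted per allocation:
// mapping the same allocation twice returns the same pointer and requires two unmaps before
// VmaAllocationInfo::pMappedData goes back to null. Allocations created with
// VMA_ALLOCATION_CREATE_MAPPED_BIT remain persistently mapped, so an extra map/unmap pair around
// them still leaves pMappedData non-null.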
| 5316 |
|
Adam Sawicki | daa6a55 | 2019-06-25 15:26:37 +0200 | [diff] [blame] | 5317 | // Test CREATE_MAPPED with required DEVICE_LOCAL. There was a bug with it.
|
| 5318 | static void TestDeviceLocalMapped()
|
| 5319 | {
|
| 5320 | VkResult res;
|
| 5321 |
|
| 5322 | for(uint32_t testIndex = 0; testIndex < 3; ++testIndex)
|
| 5323 | {
|
| 5324 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 5325 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
|
| 5326 | bufCreateInfo.size = 4096;
|
| 5327 |
|
| 5328 | VmaPool pool = VK_NULL_HANDLE;
|
| 5329 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 5330 | allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
|
| 5331 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 5332 | if(testIndex == 2)
|
| 5333 | {
|
| 5334 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 5335 | res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
| 5336 | TEST(res == VK_SUCCESS);
|
| 5337 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
| 5338 | TEST(res == VK_SUCCESS);
|
| 5339 | allocCreateInfo.pool = pool;
|
| 5340 | }
|
| 5341 | else if(testIndex == 1)
|
| 5342 | {
|
| 5343 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;
|
| 5344 | }
|
| 5345 |
|
| 5346 | VkBuffer buf = VK_NULL_HANDLE;
|
| 5347 | VmaAllocation alloc = VK_NULL_HANDLE;
|
| 5348 | VmaAllocationInfo allocInfo = {};
|
| 5349 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
|
| 5350 | TEST(res == VK_SUCCESS && alloc);
|
| 5351 |
|
| 5352 | VkMemoryPropertyFlags memTypeFlags = 0;
|
| 5353 | vmaGetMemoryTypeProperties(g_hAllocator, allocInfo.memoryType, &memTypeFlags);
|
| 5354 | const bool shouldBeMapped = (memTypeFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
|
| 5355 | TEST((allocInfo.pMappedData != nullptr) == shouldBeMapped);
|
| 5356 |
|
| 5357 | vmaDestroyBuffer(g_hAllocator, buf, alloc);
|
| 5358 | vmaDestroyPool(g_hAllocator, pool);
|
| 5359 | }
|
| 5360 | }
|
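// Expected behavior verified here: combining VMA_ALLOCATION_CREATE_MAPPED_BIT with required
// DEVICE_LOCAL memory must not fail even when the chosen memory type is not HOST_VISIBLE - in
// that case the allocation simply is not mapped and pMappedData stays null, which is exactly
// what the shouldBeMapped check asserts.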
| 5361 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5362 | static void TestMappingMultithreaded()
|
| 5363 | {
|
| 5364 | wprintf(L"Testing mapping multithreaded...\n");
|
| 5365 |
|
| 5366 | static const uint32_t threadCount = 16;
|
| 5367 | static const uint32_t bufferCount = 1024;
|
| 5368 | static const uint32_t threadBufferCount = bufferCount / threadCount;
|
| 5369 |
|
| 5370 | VkResult res;
|
| 5371 | volatile uint32_t memTypeIndex = UINT32_MAX;
|
| 5372 |
|
| 5373 | enum TEST
|
| 5374 | {
|
| 5375 | TEST_NORMAL,
|
| 5376 | TEST_POOL,
|
| 5377 | TEST_DEDICATED,
|
| 5378 | TEST_COUNT
|
| 5379 | };
|
| 5380 | for(uint32_t testIndex = 0; testIndex < TEST_COUNT; ++testIndex)
|
| 5381 | {
|
| 5382 | VmaPool pool = nullptr;
|
| 5383 | if(testIndex == TEST_POOL)
|
| 5384 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5385 | TEST(memTypeIndex != UINT32_MAX);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5386 | VmaPoolCreateInfo poolInfo = {};
|
| 5387 | poolInfo.memoryTypeIndex = memTypeIndex;
|
| 5388 | res = vmaCreatePool(g_hAllocator, &poolInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5389 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5390 | }
|
| 5391 |
|
| 5392 | VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 5393 | bufCreateInfo.size = 0x10000;
|
| 5394 | bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 5395 |
|
| 5396 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 5397 | allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 5398 | allocCreateInfo.pool = pool;
|
| 5399 | if(testIndex == TEST_DEDICATED)
|
| 5400 | allocCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 5401 |
|
| 5402 | std::thread threads[threadCount];
|
| 5403 | for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
|
| 5404 | {
|
| 5405 | threads[threadIndex] = std::thread([=, &memTypeIndex](){
|
| 5406 | // ======== THREAD FUNCTION ========
|
| 5407 |
|
| 5408 | RandomNumberGenerator rand{threadIndex};
|
| 5409 |
|
| 5410 | enum class MODE
|
| 5411 | {
|
| 5412 | // Don't map this buffer at all.
|
| 5413 | DONT_MAP,
|
| 5414 | // Map and quickly unmap.
|
| 5415 | MAP_FOR_MOMENT,
|
| 5416 | // Map and unmap before destruction.
|
| 5417 | MAP_FOR_LONGER,
|
| 5418 | // Map two times. Quickly unmap, second unmap before destruction.
|
| 5419 | MAP_TWO_TIMES,
|
| 5420 | // Create this buffer as persistently mapped.
|
| 5421 | PERSISTENTLY_MAPPED,
|
| 5422 | COUNT
|
| 5423 | };
|
| 5424 | std::vector<BufferInfo> bufInfos{threadBufferCount};
|
| 5425 | std::vector<MODE> bufModes{threadBufferCount};
|
| 5426 |
|
| 5427 | for(uint32_t bufferIndex = 0; bufferIndex < threadBufferCount; ++bufferIndex)
|
| 5428 | {
|
| 5429 | BufferInfo& bufInfo = bufInfos[bufferIndex];
|
| 5430 | const MODE mode = (MODE)(rand.Generate() % (uint32_t)MODE::COUNT);
|
| 5431 | bufModes[bufferIndex] = mode;
|
| 5432 |
|
| 5433 | VmaAllocationCreateInfo localAllocCreateInfo = allocCreateInfo;
|
| 5434 | if(mode == MODE::PERSISTENTLY_MAPPED)
|
| 5435 | localAllocCreateInfo.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 5436 |
|
| 5437 | VmaAllocationInfo allocInfo;
|
| 5438 | VkResult res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &localAllocCreateInfo,
|
| 5439 | &bufInfo.Buffer, &bufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5440 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5441 |
|
| 5442 | if(memTypeIndex == UINT32_MAX)
|
| 5443 | memTypeIndex = allocInfo.memoryType;
|
| 5444 |
|
| 5445 | char* data = nullptr;
|
| 5446 |
|
| 5447 | if(mode == MODE::PERSISTENTLY_MAPPED)
|
| 5448 | {
|
| 5449 | data = (char*)allocInfo.pMappedData;
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5450 | TEST(data != nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5451 | }
|
| 5452 | else if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_FOR_LONGER ||
|
| 5453 | mode == MODE::MAP_TWO_TIMES)
|
| 5454 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5455 | TEST(data == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5456 | res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5457 | TEST(res == VK_SUCCESS && data != nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5458 |
|
| 5459 | if(mode == MODE::MAP_TWO_TIMES)
|
| 5460 | {
|
| 5461 | char* data2 = nullptr;
|
| 5462 | res = vmaMapMemory(g_hAllocator, bufInfo.Allocation, (void**)&data2);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5463 | TEST(res == VK_SUCCESS && data2 == data);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5464 | }
|
| 5465 | }
|
| 5466 | else if(mode == MODE::DONT_MAP)
|
| 5467 | {
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5468 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5469 | }
|
| 5470 | else
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5471 | TEST(0);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5472 |
|
| 5473 | // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
|
| 5474 | if(data)
|
| 5475 | data[0xFFFF] = data[0];
|
| 5476 |
|
| 5477 | if(mode == MODE::MAP_FOR_MOMENT || mode == MODE::MAP_TWO_TIMES)
|
| 5478 | {
|
| 5479 | vmaUnmapMemory(g_hAllocator, bufInfo.Allocation);
|
| 5480 |
|
| 5481 | VmaAllocationInfo allocInfo;
|
| 5482 | vmaGetAllocationInfo(g_hAllocator, bufInfo.Allocation, &allocInfo);
|
| 5483 | if(mode == MODE::MAP_FOR_MOMENT)
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5484 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5485 | else
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5486 | TEST(allocInfo.pMappedData == data);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5487 | }
|
| 5488 |
|
| 5489 | switch(rand.Generate() % 3)
|
| 5490 | {
|
| 5491 | case 0: Sleep(0); break; // Yield.
|
| 5492 | case 1: Sleep(10); break; // 10 ms
|
| 5493 | // default: No sleep.
|
| 5494 | }
|
| 5495 |
|
| 5496 | // Test if reading and writing from the beginning and end of mapped memory doesn't crash.
|
| 5497 | if(data)
|
| 5498 | data[0xFFFF] = data[0];
|
| 5499 | }
|
| 5500 |
|
| 5501 | for(size_t bufferIndex = threadBufferCount; bufferIndex--; )
|
| 5502 | {
|
| 5503 | if(bufModes[bufferIndex] == MODE::MAP_FOR_LONGER ||
|
| 5504 | bufModes[bufferIndex] == MODE::MAP_TWO_TIMES)
|
| 5505 | {
|
| 5506 | vmaUnmapMemory(g_hAllocator, bufInfos[bufferIndex].Allocation);
|
| 5507 |
|
| 5508 | VmaAllocationInfo allocInfo;
|
| 5509 | vmaGetAllocationInfo(g_hAllocator, bufInfos[bufferIndex].Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5510 | TEST(allocInfo.pMappedData == nullptr);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5511 | }
|
| 5512 |
|
| 5513 | vmaDestroyBuffer(g_hAllocator, bufInfos[bufferIndex].Buffer, bufInfos[bufferIndex].Allocation);
|
| 5514 | }
|
| 5515 | });
|
| 5516 | }
|
| 5517 |
|
| 5518 | for(uint32_t threadIndex = 0; threadIndex < threadCount; ++threadIndex)
|
| 5519 | threads[threadIndex].join();
|
| 5520 |
|
| 5521 | vmaDestroyPool(g_hAllocator, pool);
|
| 5522 | }
|
| 5523 | }
|
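// The threads above stress concurrent vmaMapMemory()/vmaUnmapMemory() on allocations taken from
// the same memory type (and, in the TEST_POOL iteration, from one shared VmaPool). Each buffer is
// randomly assigned a MODE, and the matching unmaps are issued either immediately or in the final
// cleanup loop, so map reference counts are always balanced before vmaDestroyBuffer().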
| 5524 |
|
| 5525 | static void WriteMainTestResultHeader(FILE* file)
|
| 5526 | {
|
| 5527 | fprintf(file,
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5528 | "Code,Time,"
|
| 5529 | "Threads,Buffers and images,Sizes,Operations,Allocation strategy,Free order,"
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5530 | "Total Time (us),"
|
| 5531 | "Allocation Time Min (us),"
|
| 5532 | "Allocation Time Avg (us),"
|
| 5533 | "Allocation Time Max (us),"
|
| 5534 | "Deallocation Time Min (us),"
|
| 5535 | "Deallocation Time Avg (us),"
|
| 5536 | "Deallocation Time Max (us),"
|
| 5537 | "Total Memory Allocated (B),"
|
| 5538 | "Free Range Size Avg (B),"
|
| 5539 | "Free Range Size Max (B)\n");
|
| 5540 | }
|
| 5541 |
|
| 5542 | static void WriteMainTestResult(
|
| 5543 | FILE* file,
|
| 5544 | const char* codeDescription,
|
| 5545 | const char* testDescription,
|
| 5546 | const Config& config, const Result& result)
|
| 5547 | {
|
| 5548 | float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
|
| 5549 | float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
|
| 5550 | float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
|
| 5551 | float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
|
| 5552 | float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
|
| 5553 | float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
|
| 5554 | float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
|
| 5555 |
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 5556 | std::string currTime;
|
| 5557 | CurrentTimeToStr(currTime);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5558 |
|
| 5559 | fprintf(file,
|
| 5560 | "%s,%s,%s,"
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5561 | "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u\n",
|
| 5562 | codeDescription,
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 5563 | currTime.c_str(),
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5564 | testDescription,
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5565 | totalTimeSeconds * 1e6f,
|
| 5566 | allocationTimeMinSeconds * 1e6f,
|
| 5567 | allocationTimeAvgSeconds * 1e6f,
|
| 5568 | allocationTimeMaxSeconds * 1e6f,
|
| 5569 | deallocationTimeMinSeconds * 1e6f,
|
| 5570 | deallocationTimeAvgSeconds * 1e6f,
|
| 5571 | deallocationTimeMaxSeconds * 1e6f,
|
| 5572 | result.TotalMemoryAllocated,
|
| 5573 | result.FreeRangeSizeAvg,
|
| 5574 | result.FreeRangeSizeMax);
|
| 5575 | }
|
| 5576 |
|
| 5577 | static void WritePoolTestResultHeader(FILE* file)
|
| 5578 | {
|
| 5579 | fprintf(file,
|
| 5580 | "Code,Test,Time,"
|
| 5581 | "Config,"
|
| 5582 | "Total Time (us),"
|
| 5583 | "Allocation Time Min (us),"
|
| 5584 | "Allocation Time Avg (us),"
|
| 5585 | "Allocation Time Max (us),"
|
| 5586 | "Deallocation Time Min (us),"
|
| 5587 | "Deallocation Time Avg (us),"
|
| 5588 | "Deallocation Time Max (us),"
|
| 5589 | "Lost Allocation Count,"
|
| 5590 | "Lost Allocation Total Size (B),"
|
| 5591 | "Failed Allocation Count,"
|
| 5592 | "Failed Allocation Total Size (B)\n");
|
| 5593 | }
|
| 5594 |
|
| 5595 | static void WritePoolTestResult(
|
| 5596 | FILE* file,
|
| 5597 | const char* codeDescription,
|
| 5598 | const char* testDescription,
|
| 5599 | const PoolTestConfig& config,
|
| 5600 | const PoolTestResult& result)
|
| 5601 | {
|
| 5602 | float totalTimeSeconds = ToFloatSeconds(result.TotalTime);
|
| 5603 | float allocationTimeMinSeconds = ToFloatSeconds(result.AllocationTimeMin);
|
| 5604 | float allocationTimeAvgSeconds = ToFloatSeconds(result.AllocationTimeAvg);
|
| 5605 | float allocationTimeMaxSeconds = ToFloatSeconds(result.AllocationTimeMax);
|
| 5606 | float deallocationTimeMinSeconds = ToFloatSeconds(result.DeallocationTimeMin);
|
| 5607 | float deallocationTimeAvgSeconds = ToFloatSeconds(result.DeallocationTimeAvg);
|
| 5608 | float deallocationTimeMaxSeconds = ToFloatSeconds(result.DeallocationTimeMax);
|
| 5609 |
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 5610 | std::string currTime;
|
| 5611 | CurrentTimeToStr(currTime);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5612 |
|
| 5613 | fprintf(file,
|
| 5614 | "%s,%s,%s,"
|
| 5615 | "ThreadCount=%u PoolSize=%llu FrameCount=%u TotalItemCount=%u UsedItemCount=%u...%u ItemsToMakeUnusedPercent=%u,"
|
| 5616 | "%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%.2f,%I64u,%I64u,%I64u,%I64u\n",
|
| 5617 | // General
|
| 5618 | codeDescription,
|
| 5619 | testDescription,
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 5620 | currTime.c_str(),
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5621 | // Config
|
| 5622 | config.ThreadCount,
|
| 5623 | (unsigned long long)config.PoolSize,
|
| 5624 | config.FrameCount,
|
| 5625 | config.TotalItemCount,
|
| 5626 | config.UsedItemCountMin,
|
| 5627 | config.UsedItemCountMax,
|
| 5628 | config.ItemsToMakeUnusedPercent,
|
| 5629 | // Results
|
| 5630 | totalTimeSeconds * 1e6f,
|
| 5631 | allocationTimeMinSeconds * 1e6f,
|
| 5632 | allocationTimeAvgSeconds * 1e6f,
|
| 5633 | allocationTimeMaxSeconds * 1e6f,
|
| 5634 | deallocationTimeMinSeconds * 1e6f,
|
| 5635 | deallocationTimeAvgSeconds * 1e6f,
|
| 5636 | deallocationTimeMaxSeconds * 1e6f,
|
| 5637 | result.LostAllocationCount,
|
| 5638 | result.LostAllocationTotalSize,
|
| 5639 | result.FailedAllocationCount,
|
| 5640 | result.FailedAllocationTotalSize);
|
| 5641 | }
|
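// The two Write*TestResult() functions above append one CSV row per test run: durations are
// converted to seconds by ToFloatSeconds() and printed in microseconds (hence the "* 1e6f"),
// and 64-bit byte counts are printed with the MSVC-specific "%I64u" format.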
| 5642 |
|
| 5643 | static void PerformCustomMainTest(FILE* file)
|
| 5644 | {
|
| 5645 | Config config{};
|
| 5646 | config.RandSeed = 65735476;
|
| 5647 | //config.MaxBytesToAllocate = 4ull * 1024 * 1024; // 4 MB
|
| 5648 | config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
|
| 5649 | config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
|
| 5650 | config.FreeOrder = FREE_ORDER::FORWARD;
|
| 5651 | config.ThreadCount = 16;
|
| 5652 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5653 | config.AllocationStrategy = 0;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5654 |
|
| 5655 | // Buffers
|
| 5656 | //config.AllocationSizes.push_back({4, 16, 1024});
|
| 5657 | config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
|
| 5658 |
|
| 5659 | // Images
|
| 5660 | //config.AllocationSizes.push_back({4, 0, 0, 4, 32});
|
| 5661 | //config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
|
| 5662 |
|
| 5663 | config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
|
| 5664 | config.AdditionalOperationCount = 1024;
|
| 5665 |
|
| 5666 | Result result{};
|
| 5667 | VkResult res = MainTest(result, config);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5668 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5669 | WriteMainTestResult(file, "Foo", "CustomTest", config, result);
|
| 5670 | }
|
| 5671 |
|
| 5672 | static void PerformCustomPoolTest(FILE* file)
|
| 5673 | {
|
| 5674 | PoolTestConfig config;
|
| 5675 | config.PoolSize = 100 * 1024 * 1024;
|
| 5676 | config.RandSeed = 2345764;
|
| 5677 | config.ThreadCount = 1;
|
| 5678 | config.FrameCount = 200;
|
| 5679 | config.ItemsToMakeUnusedPercent = 2;
|
| 5680 |
|
| 5681 | AllocationSize allocSize = {};
|
| 5682 | allocSize.BufferSizeMin = 1024;
|
| 5683 | allocSize.BufferSizeMax = 1024 * 1024;
|
| 5684 | allocSize.Probability = 1;
|
| 5685 | config.AllocationSizes.push_back(allocSize);
|
| 5686 |
|
| 5687 | allocSize.BufferSizeMin = 0;
|
| 5688 | allocSize.BufferSizeMax = 0;
|
| 5689 | allocSize.ImageSizeMin = 128;
|
| 5690 | allocSize.ImageSizeMax = 1024;
|
| 5691 | allocSize.Probability = 1;
|
| 5692 | config.AllocationSizes.push_back(allocSize);
|
| 5693 |
|
| 5694 | config.PoolSize = config.CalcAvgResourceSize() * 200;
|
| 5695 | config.UsedItemCountMax = 160;
|
| 5696 | config.TotalItemCount = config.UsedItemCountMax * 10;
|
| 5697 | config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
|
| 5698 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5699 | PoolTestResult result = {};
|
| 5700 | TestPool_Benchmark(result, config);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5701 |
|
| 5702 | WritePoolTestResult(file, "Code desc", "Test desc", config, result);
|
| 5703 | }
|
| 5704 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5705 | static void PerformMainTests(FILE* file)
|
| 5706 | {
|
| 5707 | uint32_t repeatCount = 1;
|
| 5708 | if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
|
| 5709 |
|
| 5710 | Config config{};
|
| 5711 | config.RandSeed = 65735476;
|
| 5712 | config.MemUsageProbability[0] = 1; // VMA_MEMORY_USAGE_GPU_ONLY
|
| 5713 | config.FreeOrder = FREE_ORDER::FORWARD;
|
| 5714 |
|
| 5715 | size_t threadCountCount = 1;
|
| 5716 | switch(ConfigType)
|
| 5717 | {
|
| 5718 | case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
|
| 5719 | case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
|
| 5720 | case CONFIG_TYPE_AVERAGE: threadCountCount = 3; break;
|
| 5721 | case CONFIG_TYPE_LARGE: threadCountCount = 5; break;
|
| 5722 | case CONFIG_TYPE_MAXIMUM: threadCountCount = 7; break;
|
| 5723 | default: assert(0);
|
| 5724 | }
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5725 |
|
Adam Sawicki | 0a3fb6c | 2018-08-27 14:40:27 +0200 | [diff] [blame] | 5726 | const size_t strategyCount = GetAllocationStrategyCount();
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5727 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5728 | for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
|
| 5729 | {
|
| 5730 | std::string desc1;
|
| 5731 |
|
| 5732 | switch(threadCountIndex)
|
| 5733 | {
|
| 5734 | case 0:
|
| 5735 | desc1 += "1_thread";
|
| 5736 | config.ThreadCount = 1;
|
| 5737 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
|
| 5738 | break;
|
| 5739 | case 1:
|
| 5740 | desc1 += "16_threads+0%_common";
|
| 5741 | config.ThreadCount = 16;
|
| 5742 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
|
| 5743 | break;
|
| 5744 | case 2:
|
| 5745 | desc1 += "16_threads+50%_common";
|
| 5746 | config.ThreadCount = 16;
|
| 5747 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
|
| 5748 | break;
|
| 5749 | case 3:
|
| 5750 | desc1 += "16_threads+100%_common";
|
| 5751 | config.ThreadCount = 16;
|
| 5752 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
|
| 5753 | break;
|
| 5754 | case 4:
|
| 5755 | desc1 += "2_threads+0%_common";
|
| 5756 | config.ThreadCount = 2;
|
| 5757 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 0;
|
| 5758 | break;
|
| 5759 | case 5:
|
| 5760 | desc1 += "2_threads+50%_common";
|
| 5761 | config.ThreadCount = 2;
|
| 5762 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 50;
|
| 5763 | break;
|
| 5764 | case 6:
|
| 5765 | desc1 += "2_threads+100%_common";
|
| 5766 | config.ThreadCount = 2;
|
| 5767 | config.ThreadsUsingCommonAllocationsProbabilityPercent = 100;
|
| 5768 | break;
|
| 5769 | default:
|
| 5770 | assert(0);
|
| 5771 | }
|
| 5772 |
|
| 5773 | // 0 = buffers, 1 = images, 2 = buffers and images
|
| 5774 | size_t buffersVsImagesCount = 2;
|
| 5775 | if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
|
| 5776 | for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
|
| 5777 | {
|
| 5778 | std::string desc2 = desc1;
|
| 5779 | switch(buffersVsImagesIndex)
|
| 5780 | {
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5781 | case 0: desc2 += ",Buffers"; break;
|
| 5782 | case 1: desc2 += ",Images"; break;
|
| 5783 | case 2: desc2 += ",Buffers+Images"; break;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5784 | default: assert(0);
|
| 5785 | }
|
| 5786 |
|
| 5787 | // 0 = small, 1 = large, 2 = small and large
|
| 5788 | size_t smallVsLargeCount = 2;
|
| 5789 | if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
|
| 5790 | for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
|
| 5791 | {
|
| 5792 | std::string desc3 = desc2;
|
| 5793 | switch(smallVsLargeIndex)
|
| 5794 | {
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5795 | case 0: desc3 += ",Small"; break;
|
| 5796 | case 1: desc3 += ",Large"; break;
|
| 5797 | case 2: desc3 += ",Small+Large"; break;
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5798 | default: assert(0);
|
| 5799 | }
|
| 5800 |
|
| 5801 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 5802 | config.MaxBytesToAllocate = 4ull * 1024 * 1024 * 1024; // 4 GB
|
| 5803 | else
|
| 5804 | config.MaxBytesToAllocate = 4ull * 1024 * 1024;
|
| 5805 |
|
| 5806 | // 0 = varying sizes min...max, 1 = set of constant sizes
|
| 5807 | size_t constantSizesCount = 1;
|
| 5808 | if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
|
| 5809 | for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
|
| 5810 | {
|
| 5811 | std::string desc4 = desc3;
|
| 5812 | switch(constantSizesIndex)
|
| 5813 | {
|
| 5814 | case 0: desc4 += " Varying_sizes"; break;
|
| 5815 | case 1: desc4 += " Constant_sizes"; break;
|
| 5816 | default: assert(0);
|
| 5817 | }
|
| 5818 |
|
| 5819 | config.AllocationSizes.clear();
|
| 5820 | // Buffers present
|
| 5821 | if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
|
| 5822 | {
|
| 5823 | // Small
|
| 5824 | if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
|
| 5825 | {
|
| 5826 | // Varying size
|
| 5827 | if(constantSizesIndex == 0)
|
| 5828 | config.AllocationSizes.push_back({4, 16, 1024});
|
| 5829 | // Constant sizes
|
| 5830 | else
|
| 5831 | {
|
| 5832 | config.AllocationSizes.push_back({1, 16, 16});
|
| 5833 | config.AllocationSizes.push_back({1, 64, 64});
|
| 5834 | config.AllocationSizes.push_back({1, 256, 256});
|
| 5835 | config.AllocationSizes.push_back({1, 1024, 1024});
|
| 5836 | }
|
| 5837 | }
|
| 5838 | // Large
|
| 5839 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 5840 | {
|
| 5841 | // Varying size
|
| 5842 | if(constantSizesIndex == 0)
|
| 5843 | config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
|
| 5844 | // Constant sizes
|
| 5845 | else
|
| 5846 | {
|
| 5847 | config.AllocationSizes.push_back({1, 0x10000, 0x10000});
|
| 5848 | config.AllocationSizes.push_back({1, 0x80000, 0x80000});
|
| 5849 | config.AllocationSizes.push_back({1, 0x200000, 0x200000});
|
| 5850 | config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
|
| 5851 | }
|
| 5852 | }
|
| 5853 | }
|
| 5854 | // Images present
|
| 5855 | if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
|
| 5856 | {
|
| 5857 | // Small
|
| 5858 | if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
|
| 5859 | {
|
| 5860 | // Varying size
|
| 5861 | if(constantSizesIndex == 0)
|
| 5862 | config.AllocationSizes.push_back({4, 0, 0, 4, 32});
|
| 5863 | // Constant sizes
|
| 5864 | else
|
| 5865 | {
|
| 5866 | config.AllocationSizes.push_back({1, 0, 0, 4, 4});
|
| 5867 | config.AllocationSizes.push_back({1, 0, 0, 8, 8});
|
| 5868 | config.AllocationSizes.push_back({1, 0, 0, 16, 16});
|
| 5869 | config.AllocationSizes.push_back({1, 0, 0, 32, 32});
|
| 5870 | }
|
| 5871 | }
|
| 5872 | // Large
|
| 5873 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 5874 | {
|
| 5875 | // Varying size
|
| 5876 | if(constantSizesIndex == 0)
|
| 5877 | config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
|
| 5878 | // Constant sizes
|
| 5879 | else
|
| 5880 | {
|
| 5881 | config.AllocationSizes.push_back({1, 0, 0, 256, 256});
|
| 5882 | config.AllocationSizes.push_back({1, 0, 0, 512, 512});
|
| 5883 | config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
|
| 5884 | config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
|
| 5885 | }
|
| 5886 | }
|
| 5887 | }
|
| 5888 |
|
| 5889 | // 0 = allocate 100% up front with no additional operations; 1 = 50%, 2 = 5%, 3 = 95% up front, each followed by many additional operations
|
| 5890 | size_t beginBytesToAllocateCount = 1;
|
| 5891 | if(ConfigType >= CONFIG_TYPE_SMALL) ++beginBytesToAllocateCount;
|
| 5892 | if(ConfigType >= CONFIG_TYPE_AVERAGE) ++beginBytesToAllocateCount;
|
| 5893 | if(ConfigType >= CONFIG_TYPE_LARGE) ++beginBytesToAllocateCount;
|
| 5894 | for(size_t beginBytesToAllocateIndex = 0; beginBytesToAllocateIndex < beginBytesToAllocateCount; ++beginBytesToAllocateIndex)
|
| 5895 | {
|
| 5896 | std::string desc5 = desc4;
|
| 5897 |
|
| 5898 | switch(beginBytesToAllocateIndex)
|
| 5899 | {
|
| 5900 | case 0:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5901 | desc5 += ",Allocate_100%";
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5902 | config.BeginBytesToAllocate = config.MaxBytesToAllocate;
|
| 5903 | config.AdditionalOperationCount = 0;
|
| 5904 | break;
|
| 5905 | case 1:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5906 | desc5 += ",Allocate_50%+Operations";
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5907 | config.BeginBytesToAllocate = config.MaxBytesToAllocate * 50 / 100;
|
| 5908 | config.AdditionalOperationCount = 1024;
|
| 5909 | break;
|
| 5910 | case 2:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5911 | desc5 += ",Allocate_5%+Operations";
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5912 | config.BeginBytesToAllocate = config.MaxBytesToAllocate * 5 / 100;
|
| 5913 | config.AdditionalOperationCount = 1024;
|
| 5914 | break;
|
| 5915 | case 3:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5916 | desc5 += ",Allocate_95%+Operations";
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5917 | config.BeginBytesToAllocate = config.MaxBytesToAllocate * 95 / 100;
|
| 5918 | config.AdditionalOperationCount = 1024;
|
| 5919 | break;
|
| 5920 | default:
|
| 5921 | assert(0);
|
| 5922 | }
|
| 5923 |
|
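| | // Innermost dimension: the allocation strategies exposed by VMA (best fit, worst fit, first fit).
|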
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5924 | for(size_t strategyIndex = 0; strategyIndex < strategyCount; ++strategyIndex)
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5925 | {
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5926 | std::string desc6 = desc5;
|
| 5927 | switch(strategyIndex)
|
| 5928 | {
|
| 5929 | case 0:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5930 | desc6 += ",BestFit";
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5931 | config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
|
| 5932 | break;
|
| 5933 | case 1:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5934 | desc6 += ",WorstFit";
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5935 | config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT;
|
| 5936 | break;
|
| 5937 | case 2:
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5938 | desc6 += ",FirstFit";
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5939 | config.AllocationStrategy = VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT;
|
| 5940 | break;
|
| 5941 | default:
|
| 5942 | assert(0);
|
| 5943 | }
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5944 |
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 5945 | desc6 += ',';
|
| 5946 | desc6 += FREE_ORDER_NAMES[(uint32_t)config.FreeOrder];
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5947 |
|
| 5948 | const char* testDescription = desc6.c_str();
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5949 |
|
| 5950 | for(size_t repeat = 0; repeat < repeatCount; ++repeat)
|
| 5951 | {
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5952 | printf("%s #%u\n", testDescription, (uint32_t)repeat);
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5953 |
|
| 5954 | Result result{};
|
| 5955 | VkResult res = MainTest(result, config);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 5956 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 5957 | if(file)
|
| 5958 | {
|
| 5959 | WriteMainTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
|
| 5960 | }
|
Adam Sawicki | 0667e33 | 2018-08-24 17:26:44 +0200 | [diff] [blame] | 5961 | }
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 5962 | }
|
| 5963 | }
|
| 5964 | }
|
| 5965 | }
|
| 5966 | }
|
| 5967 | }
|
| 5968 | }
|
| 5969 |
|
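| | // Like PerformMainTests(), but benchmarks custom VmaPool usage: the pool size is derived from
|
| | // the average resource size, and an extra dimension varies how heavily the pool is subscribed.
|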
| 5970 | static void PerformPoolTests(FILE* file)
|
| 5971 | {
|
| 5972 | const size_t AVG_RESOURCES_PER_POOL = 300;
|
| 5973 |
|
| 5974 | uint32_t repeatCount = 1;
|
| 5975 | if(ConfigType >= CONFIG_TYPE_MAXIMUM) repeatCount = 3;
|
| 5976 |
|
| 5977 | PoolTestConfig config{};
|
| 5978 | config.RandSeed = 2346343;
|
| 5979 | config.FrameCount = 200;
|
| 5980 | config.ItemsToMakeUnusedPercent = 2;
|
| 5981 |
|
| 5982 | size_t threadCountCount = 1;
|
| 5983 | switch(ConfigType)
|
| 5984 | {
|
| 5985 | case CONFIG_TYPE_MINIMUM: threadCountCount = 1; break;
|
| 5986 | case CONFIG_TYPE_SMALL: threadCountCount = 2; break;
|
| 5987 | case CONFIG_TYPE_AVERAGE: threadCountCount = 2; break;
|
| 5988 | case CONFIG_TYPE_LARGE: threadCountCount = 3; break;
|
| 5989 | case CONFIG_TYPE_MAXIMUM: threadCountCount = 3; break;
|
| 5990 | default: assert(0);
|
| 5991 | }
|
| 5992 | for(size_t threadCountIndex = 0; threadCountIndex < threadCountCount; ++threadCountIndex)
|
| 5993 | {
|
| 5994 | std::string desc1;
|
| 5995 |
|
| 5996 | switch(threadCountIndex)
|
| 5997 | {
|
| 5998 | case 0:
|
| 5999 | desc1 += "1_thread";
|
| 6000 | config.ThreadCount = 1;
|
| 6001 | break;
|
| 6002 | case 1:
|
| 6003 | desc1 += "16_threads";
|
| 6004 | config.ThreadCount = 16;
|
| 6005 | break;
|
| 6006 | case 2:
|
| 6007 | desc1 += "2_threads";
|
| 6008 | config.ThreadCount = 2;
|
| 6009 | break;
|
| 6010 | default:
|
| 6011 | assert(0);
|
| 6012 | }
|
| 6013 |
|
| 6014 | // 0 = buffers, 1 = images, 2 = buffers and images
|
| 6015 | size_t buffersVsImagesCount = 2;
|
| 6016 | if(ConfigType >= CONFIG_TYPE_LARGE) ++buffersVsImagesCount;
|
| 6017 | for(size_t buffersVsImagesIndex = 0; buffersVsImagesIndex < buffersVsImagesCount; ++buffersVsImagesIndex)
|
| 6018 | {
|
| 6019 | std::string desc2 = desc1;
|
| 6020 | switch(buffersVsImagesIndex)
|
| 6021 | {
|
| 6022 | case 0: desc2 += " Buffers"; break;
|
| 6023 | case 1: desc2 += " Images"; break;
|
| 6024 | case 2: desc2 += " Buffers+Images"; break;
|
| 6025 | default: assert(0);
|
| 6026 | }
|
| 6027 |
|
| 6028 | // 0 = small, 1 = large, 2 = small and large
|
| 6029 | size_t smallVsLargeCount = 2;
|
| 6030 | if(ConfigType >= CONFIG_TYPE_LARGE) ++smallVsLargeCount;
|
| 6031 | for(size_t smallVsLargeIndex = 0; smallVsLargeIndex < smallVsLargeCount; ++smallVsLargeIndex)
|
| 6032 | {
|
| 6033 | std::string desc3 = desc2;
|
| 6034 | switch(smallVsLargeIndex)
|
| 6035 | {
|
| 6036 | case 0: desc3 += " Small"; break;
|
| 6037 | case 1: desc3 += " Large"; break;
|
| 6038 | case 2: desc3 += " Small+Large"; break;
|
| 6039 | default: assert(0);
|
| 6040 | }
|
| 6041 |
|
| 6042 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 6043 | config.PoolSize = 6ull * 1024 * 1024 * 1024; // 6 GB
|
| 6044 | else
|
| 6045 | config.PoolSize = 4ull * 1024 * 1024;
|
| 6046 |
|
| 6047 | // 0 = varying sizes min...max, 1 = set of constant sizes
|
| 6048 | size_t constantSizesCount = 1;
|
| 6049 | if(ConfigType >= CONFIG_TYPE_SMALL) ++constantSizesCount;
|
| 6050 | for(size_t constantSizesIndex = 0; constantSizesIndex < constantSizesCount; ++constantSizesIndex)
|
| 6051 | {
|
| 6052 | std::string desc4 = desc3;
|
| 6053 | switch(constantSizesIndex)
|
| 6054 | {
|
| 6055 | case 0: desc4 += " Varying_sizes"; break;
|
| 6056 | case 1: desc4 += " Constant_sizes"; break;
|
| 6057 | default: assert(0);
|
| 6058 | }
|
| 6059 |
|
| 6060 | config.AllocationSizes.clear();
|
| 6061 | // Buffers present
|
| 6062 | if(buffersVsImagesIndex == 0 || buffersVsImagesIndex == 2)
|
| 6063 | {
|
| 6064 | // Small
|
| 6065 | if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
|
| 6066 | {
|
| 6067 | // Varying size
|
| 6068 | if(constantSizesIndex == 0)
|
| 6069 | config.AllocationSizes.push_back({4, 16, 1024});
|
| 6070 | // Constant sizes
|
| 6071 | else
|
| 6072 | {
|
| 6073 | config.AllocationSizes.push_back({1, 16, 16});
|
| 6074 | config.AllocationSizes.push_back({1, 64, 64});
|
| 6075 | config.AllocationSizes.push_back({1, 256, 256});
|
| 6076 | config.AllocationSizes.push_back({1, 1024, 1024});
|
| 6077 | }
|
| 6078 | }
|
| 6079 | // Large
|
| 6080 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 6081 | {
|
| 6082 | // Varying size
|
| 6083 | if(constantSizesIndex == 0)
|
| 6084 | config.AllocationSizes.push_back({4, 0x10000, 0xA00000}); // 64 KB ... 10 MB
|
| 6085 | // Constant sizes
|
| 6086 | else
|
| 6087 | {
|
| 6088 | config.AllocationSizes.push_back({1, 0x10000, 0x10000});
|
| 6089 | config.AllocationSizes.push_back({1, 0x80000, 0x80000});
|
| 6090 | config.AllocationSizes.push_back({1, 0x200000, 0x200000});
|
| 6091 | config.AllocationSizes.push_back({1, 0xA00000, 0xA00000});
|
| 6092 | }
|
| 6093 | }
|
| 6094 | }
|
| 6095 | // Images present
|
| 6096 | if(buffersVsImagesIndex == 1 || buffersVsImagesIndex == 2)
|
| 6097 | {
|
| 6098 | // Small
|
| 6099 | if(smallVsLargeIndex == 0 || smallVsLargeIndex == 2)
|
| 6100 | {
|
| 6101 | // Varying size
|
| 6102 | if(constantSizesIndex == 0)
|
| 6103 | config.AllocationSizes.push_back({4, 0, 0, 4, 32});
|
| 6104 | // Constant sizes
|
| 6105 | else
|
| 6106 | {
|
| 6107 | config.AllocationSizes.push_back({1, 0, 0, 4, 4});
|
| 6108 | config.AllocationSizes.push_back({1, 0, 0, 8, 8});
|
| 6109 | config.AllocationSizes.push_back({1, 0, 0, 16, 16});
|
| 6110 | config.AllocationSizes.push_back({1, 0, 0, 32, 32});
|
| 6111 | }
|
| 6112 | }
|
| 6113 | // Large
|
| 6114 | if(smallVsLargeIndex == 1 || smallVsLargeIndex == 2)
|
| 6115 | {
|
| 6116 | // Varying size
|
| 6117 | if(constantSizesIndex == 0)
|
| 6118 | config.AllocationSizes.push_back({4, 0, 0, 256, 2048});
|
| 6119 | // Constant sizes
|
| 6120 | else
|
| 6121 | {
|
| 6122 | config.AllocationSizes.push_back({1, 0, 0, 256, 256});
|
| 6123 | config.AllocationSizes.push_back({1, 0, 0, 512, 512});
|
| 6124 | config.AllocationSizes.push_back({1, 0, 0, 1024, 1024});
|
| 6125 | config.AllocationSizes.push_back({1, 0, 0, 2048, 2048});
|
| 6126 | }
|
| 6127 | }
|
| 6128 | }
|
| 6129 |
|
| 6130 | const VkDeviceSize avgResourceSize = config.CalcAvgResourceSize();
|
| 6131 | config.PoolSize = avgResourceSize * AVG_RESOURCES_PER_POOL;
|
| 6132 |
|
| 6133 | // Pool subscription level: 0 = 66%, 1 = 133%, 2 = 100%, 3 = 33%, 4 = 166% of the average pool capacity
|
| 6134 | size_t subscriptionModeCount = 0;
|
| 6135 | switch(ConfigType)
|
| 6136 | {
|
| 6137 | case CONFIG_TYPE_MINIMUM: subscriptionModeCount = 2; break;
|
| 6138 | case CONFIG_TYPE_SMALL: subscriptionModeCount = 2; break;
|
| 6139 | case CONFIG_TYPE_AVERAGE: subscriptionModeCount = 3; break;
|
| 6140 | case CONFIG_TYPE_LARGE: subscriptionModeCount = 5; break;
|
| 6141 | case CONFIG_TYPE_MAXIMUM: subscriptionModeCount = 5; break;
|
| 6142 | default: assert(0);
|
| 6143 | }
|
| 6144 | for(size_t subscriptionModeIndex = 0; subscriptionModeIndex < subscriptionModeCount; ++subscriptionModeIndex)
|
| 6145 | {
|
| 6146 | std::string desc5 = desc4;
|
| 6147 |
|
| 6148 | switch(subscriptionModeIndex)
|
| 6149 | {
|
| 6150 | case 0:
|
| 6151 | desc5 += " Subscription_66%";
|
| 6152 | config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 66 / 100;
|
| 6153 | break;
|
| 6154 | case 1:
|
| 6155 | desc5 += " Subscription_133%";
|
| 6156 | config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 133 / 100;
|
| 6157 | break;
|
| 6158 | case 2:
|
| 6159 | desc5 += " Subscription_100%";
|
| 6160 | config.UsedItemCountMax = AVG_RESOURCES_PER_POOL;
|
| 6161 | break;
|
| 6162 | case 3:
|
| 6163 | desc5 += " Subscription_33%";
|
| 6164 | config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 33 / 100;
|
| 6165 | break;
|
| 6166 | case 4:
|
| 6167 | desc5 += " Subscription_166%";
|
| 6168 | config.UsedItemCountMax = AVG_RESOURCES_PER_POOL * 166 / 100;
|
| 6169 | break;
|
| 6170 | default:
|
| 6171 | assert(0);
|
| 6172 | }
|
| 6173 |
|
| 6174 | config.TotalItemCount = config.UsedItemCountMax * 5;
|
| 6175 | config.UsedItemCountMin = config.UsedItemCountMax * 80 / 100;
|
| 6176 |
|
| 6177 | const char* testDescription = desc5.c_str();
|
| 6178 |
|
| 6179 | for(size_t repeat = 0; repeat < repeatCount; ++repeat)
|
| 6180 | {
|
Adam Sawicki | 740b08f | 2018-08-27 13:42:07 +0200 | [diff] [blame] | 6181 | printf("%s #%u\n", testDescription, (uint32_t)repeat);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6182 |
|
| 6183 | PoolTestResult result{};
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6184 | TestPool_Benchmark(result, config);
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6185 | WritePoolTestResult(file, CODE_DESCRIPTION, testDescription, config, result);
|
| 6186 | }
|
| 6187 | }
|
| 6188 | }
|
| 6189 | }
|
| 6190 | }
|
| 6191 | }
|
| 6192 | }
|
| 6193 |
|
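| | // Smoke test of a custom pool created with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT: allocates buffers
|
| | // of assorted sizes and alignments, dumps allocator stats to a JSON file, then frees everything
|
| | // in random order.
|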
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6194 | static void BasicTestBuddyAllocator()
|
| 6195 | {
|
| 6196 | wprintf(L"Basic test buddy allocator\n");
|
| 6197 |
|
| 6198 | RandomNumberGenerator rand{76543};
|
| 6199 |
|
| 6200 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 6201 | sampleBufCreateInfo.size = 1024; // Size is irrelevant - this buffer info is only used to find a memory type index.
|
| 6202 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 6203 |
|
| 6204 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 6205 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 6206 |
|
| 6207 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 6208 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6209 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6210 |
|
Adam Sawicki | d6e6d6b | 2018-09-21 14:07:02 +0200 | [diff] [blame] | 6211 | // Deliberately adding 1023 to test usable size smaller than memory block size.
|
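| | // The buddy algorithm is expected to round the usable block size down to a power of 2 (1 MB here).
|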
| 6212 | poolCreateInfo.blockSize = 1024 * 1024 + 1023;
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6213 | poolCreateInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 6214 | //poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6215 |
|
| 6216 | VmaPool pool = nullptr;
|
| 6217 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6218 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6219 |
|
| 6220 | VkBufferCreateInfo bufCreateInfo = sampleBufCreateInfo;
|
| 6221 |
|
| 6222 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 6223 | allocCreateInfo.pool = pool;
|
| 6224 |
|
| 6225 | std::vector<BufferInfo> bufInfo;
|
| 6226 | BufferInfo newBufInfo;
|
| 6227 | VmaAllocationInfo allocInfo;
|
| 6228 |
|
| 6229 | bufCreateInfo.size = 1024 * 256;
|
| 6230 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 6231 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6232 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6233 | bufInfo.push_back(newBufInfo);
|
| 6234 |
|
| 6235 | bufCreateInfo.size = 1024 * 512;
|
| 6236 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 6237 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6238 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6239 | bufInfo.push_back(newBufInfo);
|
| 6240 |
|
| 6241 | bufCreateInfo.size = 1024 * 128;
|
| 6242 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 6243 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6244 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6245 | bufInfo.push_back(newBufInfo);
|
Adam Sawicki | a01d458 | 2018-09-21 14:22:35 +0200 | [diff] [blame] | 6246 |
|
| 6247 | // Test very small allocation, smaller than minimum node size.
|
| 6248 | bufCreateInfo.size = 1;
|
| 6249 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 6250 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6251 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | a01d458 | 2018-09-21 14:22:35 +0200 | [diff] [blame] | 6252 | bufInfo.push_back(newBufInfo);
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6253 |
|
Adam Sawicki | 9933c5c | 2018-09-21 14:57:24 +0200 | [diff] [blame] | 6254 | // Test some small allocation with alignment requirement.
|
| 6255 | {
|
| 6256 | VkMemoryRequirements memReq;
|
| 6257 | memReq.alignment = 256;
|
| 6258 | memReq.memoryTypeBits = UINT32_MAX;
|
| 6259 | memReq.size = 32;
|
| 6260 |
|
| 6261 | newBufInfo.Buffer = VK_NULL_HANDLE;
|
| 6262 | res = vmaAllocateMemory(g_hAllocator, &memReq, &allocCreateInfo,
|
| 6263 | &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6264 | TEST(res == VK_SUCCESS);
|
| 6265 | TEST(allocInfo.offset % memReq.alignment == 0);
|
Adam Sawicki | 9933c5c | 2018-09-21 14:57:24 +0200 | [diff] [blame] | 6266 | bufInfo.push_back(newBufInfo);
|
| 6267 | }
|
| 6268 |
|
| 6269 | //SaveAllocatorStatsToFile(L"TEST.json");
|
| 6270 |
|
Adam Sawicki | 21017c6 | 2018-09-07 15:26:59 +0200 | [diff] [blame] | 6271 | VmaPoolStats stats = {};
|
| 6272 | vmaGetPoolStats(g_hAllocator, pool, &stats);
|
| 6273 | int DBG = 0; // Set breakpoint here to inspect `stats`.
|
| 6274 |
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 6275 | // Allocate enough new buffers to surely fall into second block.
|
| 6276 | for(uint32_t i = 0; i < 32; ++i)
|
| 6277 | {
|
| 6278 | bufCreateInfo.size = 1024 * (rand.Generate() % 32 + 1);
|
| 6279 | res = vmaCreateBuffer(g_hAllocator, &bufCreateInfo, &allocCreateInfo,
|
| 6280 | &newBufInfo.Buffer, &newBufInfo.Allocation, &allocInfo);
|
Adam Sawicki | b8d34d5 | 2018-10-03 17:41:20 +0200 | [diff] [blame] | 6281 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 6282 | bufInfo.push_back(newBufInfo);
|
| 6283 | }
|
| 6284 |
|
| 6285 | SaveAllocatorStatsToFile(L"BuddyTest01.json");
|
| 6286 |
|
Adam Sawicki | a83793a | 2018-09-03 13:40:42 +0200 | [diff] [blame] | 6287 | // Destroy the buffers in random order.
|
| 6288 | while(!bufInfo.empty())
|
| 6289 | {
|
| 6290 | const size_t indexToDestroy = rand.Generate() % bufInfo.size();
|
| 6291 | const BufferInfo& currBufInfo = bufInfo[indexToDestroy];
|
| 6292 | vmaDestroyBuffer(g_hAllocator, currBufInfo.Buffer, currBufInfo.Allocation);
|
| 6293 | bufInfo.erase(bufInfo.begin() + indexToDestroy);
|
| 6294 | }
|
| 6295 |
|
| 6296 | vmaDestroyPool(g_hAllocator, pool);
|
| 6297 | }
|
| 6298 |
|
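| | // Exercises the batch allocation API - vmaAllocateMemoryPages()/vmaFreeMemoryPages() - against a
|
| | // fixed-size pool, including expected failure cases and dedicated allocations.
|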
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6299 | static void BasicTestAllocatePages()
|
| 6300 | {
|
| 6301 | wprintf(L"Basic test allocate pages\n");
|
| 6302 |
|
| 6303 | RandomNumberGenerator rand{765461};
|
| 6304 |
|
| 6305 | VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
|
| 6306 | sampleBufCreateInfo.size = 1024; // Size is irrelevant - this buffer info is only used to find a memory type index.
|
| 6307 | sampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
|
| 6308 |
|
| 6309 | VmaAllocationCreateInfo sampleAllocCreateInfo = {};
|
| 6310 | sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 6311 |
|
| 6312 | VmaPoolCreateInfo poolCreateInfo = {};
|
| 6313 | VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_hAllocator, &sampleBufCreateInfo, &sampleAllocCreateInfo, &poolCreateInfo.memoryTypeIndex);
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6314 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6315 |
|
| 6316 | // 1 block of 1 MB.
|
| 6317 | poolCreateInfo.blockSize = 1024 * 1024;
|
| 6318 | poolCreateInfo.minBlockCount = poolCreateInfo.maxBlockCount = 1;
|
| 6319 |
|
| 6320 | // Create pool.
|
| 6321 | VmaPool pool = nullptr;
|
| 6322 | res = vmaCreatePool(g_hAllocator, &poolCreateInfo, &pool);
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6323 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6324 |
|
| 6325 | // Make 100 allocations of 4 KB - they should fit into the pool.
|
| 6326 | VkMemoryRequirements memReq;
|
| 6327 | memReq.memoryTypeBits = UINT32_MAX;
|
| 6328 | memReq.alignment = 4 * 1024;
|
| 6329 | memReq.size = 4 * 1024;
|
| 6330 |
|
| 6331 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 6332 | allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
|
| 6333 | allocCreateInfo.pool = pool;
|
| 6334 |
|
| 6335 | constexpr uint32_t allocCount = 100;
|
| 6336 |
|
| 6337 | std::vector<VmaAllocation> alloc{allocCount};
|
| 6338 | std::vector<VmaAllocationInfo> allocInfo{allocCount};
|
| 6339 | res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6340 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6341 | for(uint32_t i = 0; i < allocCount; ++i)
|
| 6342 | {
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6343 | TEST(alloc[i] != VK_NULL_HANDLE &&
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6344 | allocInfo[i].pMappedData != nullptr &&
|
| 6345 | allocInfo[i].deviceMemory == allocInfo[0].deviceMemory &&
|
| 6346 | allocInfo[i].memoryType == allocInfo[0].memoryType);
|
| 6347 | }
|
| 6348 |
|
| 6349 | // Free the allocations.
|
| 6350 | vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
|
| 6351 | std::fill(alloc.begin(), alloc.end(), nullptr);
|
| 6352 | std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
|
| 6353 |
|
| 6354 | // Try to make 100 allocations of 100 KB. This call should fail due to not enough memory.
|
| 6355 | // Also test passing null for the optional allocation info output.
|
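| | // On failure the call should produce no partial results - all output handles remain null (checked below).
|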
| 6356 | memReq.size = 100 * 1024;
|
| 6357 | res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), nullptr);
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6358 | TEST(res != VK_SUCCESS);
|
| 6359 | TEST(std::find_if(alloc.begin(), alloc.end(), [](VmaAllocation alloc){ return alloc != VK_NULL_HANDLE; }) == alloc.end());
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6360 |
|
| 6361 | // Make 100 allocations of 4 KB, but with required alignment of 128 KB. This should also fail.
|
| 6362 | memReq.size = 4 * 1024;
|
| 6363 | memReq.alignment = 128 * 1024;
|
| 6364 | res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &allocCreateInfo, allocCount, alloc.data(), allocInfo.data());
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6365 | TEST(res != VK_SUCCESS);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6366 |
|
| 6367 | // Make 100 dedicated allocations of 4 KB.
|
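| | // Each dedicated allocation should get its own VkDeviceMemory block, so every offset is expected to be 0.
|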
| 6368 | memReq.alignment = 4 * 1024;
|
| 6369 | memReq.size = 4 * 1024;
|
| 6370 |
|
| 6371 | VmaAllocationCreateInfo dedicatedAllocCreateInfo = {};
|
| 6372 | dedicatedAllocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
|
| 6373 | dedicatedAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT | VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
|
| 6374 | res = vmaAllocateMemoryPages(g_hAllocator, &memReq, &dedicatedAllocCreateInfo, allocCount, alloc.data(), allocInfo.data());
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6375 | TEST(res == VK_SUCCESS);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6376 | for(uint32_t i = 0; i < allocCount; ++i)
|
| 6377 | {
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6378 | TEST(alloc[i] != VK_NULL_HANDLE &&
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6379 | allocInfo[i].pMappedData != nullptr &&
|
| 6380 | allocInfo[i].memoryType == allocInfo[0].memoryType &&
|
| 6381 | allocInfo[i].offset == 0);
|
| 6382 | if(i > 0)
|
| 6383 | {
|
Adam Sawicki | a7d7769 | 2018-10-03 16:15:27 +0200 | [diff] [blame] | 6384 | TEST(allocInfo[i].deviceMemory != allocInfo[0].deviceMemory);
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6385 | }
|
| 6386 | }
|
| 6387 |
|
| 6388 | // Free the allocations.
|
| 6389 | vmaFreeMemoryPages(g_hAllocator, allocCount, alloc.data());
|
| 6390 | std::fill(alloc.begin(), alloc.end(), nullptr);
|
| 6391 | std::fill(allocInfo.begin(), allocInfo.end(), VmaAllocationInfo{});
|
| 6392 |
|
| 6393 | vmaDestroyPool(g_hAllocator, pool);
|
| 6394 | }
|
| 6395 |
|
Adam Sawicki | f297534 | 2018-10-16 13:49:02 +0200 | [diff] [blame] | 6396 | // Test the testing environment.
|
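| | // Creates GPU-only buffers, uploads pseudo-random data and reads it back, validating that the
|
| | // UploadGpuData()/ValidateGpuData() helpers themselves work correctly.
|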
| 6397 | static void TestGpuData()
|
| 6398 | {
|
| 6399 | RandomNumberGenerator rand = { 53434 };
|
| 6400 |
|
| 6401 | std::vector<AllocInfo> allocInfo;
|
| 6402 |
|
| 6403 | for(size_t i = 0; i < 100; ++i)
|
| 6404 | {
|
| 6405 | AllocInfo info = {};
|
| 6406 |
|
| 6407 | info.m_BufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
|
| 6408 | info.m_BufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT |
|
| 6409 | VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
|
| 6410 | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
|
| 6411 | info.m_BufferInfo.size = 1024 * 1024 * (rand.Generate() % 9 + 1);
|
| 6412 |
|
| 6413 | VmaAllocationCreateInfo allocCreateInfo = {};
|
| 6414 | allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
|
| 6415 |
|
| 6416 | VkResult res = vmaCreateBuffer(g_hAllocator, &info.m_BufferInfo, &allocCreateInfo, &info.m_Buffer, &info.m_Allocation, nullptr);
|
| 6417 | TEST(res == VK_SUCCESS);
|
| 6418 |
|
| 6419 | info.m_StartValue = rand.Generate();
|
| 6420 |
|
| 6421 | allocInfo.push_back(std::move(info));
|
| 6422 | }
|
| 6423 |
|
| 6424 | UploadGpuData(allocInfo.data(), allocInfo.size());
|
| 6425 |
|
| 6426 | ValidateGpuData(allocInfo.data(), allocInfo.size());
|
| 6427 |
|
| 6428 | DestroyAllAllocations(allocInfo);
|
| 6429 | }
|
| 6430 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6431 | void Test()
|
| 6432 | {
|
| 6433 | wprintf(L"TESTING:\n");
|
| 6434 |
|
Adam Sawicki | 48b8a33 | 2019-11-02 15:24:33 +0100 | [diff] [blame] | 6435 | if(false)
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 6436 | {
|
Adam Sawicki | 1a8424f | 2018-12-13 11:01:16 +0100 | [diff] [blame] | 6437 | ////////////////////////////////////////////////////////////////////////////////
|
| 6438 | // Temporarily insert custom tests here:
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 6439 | return;
|
| 6440 | }
|
| 6441 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6442 | // # Simple tests
|
| 6443 |
|
| 6444 | TestBasics();
|
Adam Sawicki | aaa1a56 | 2020-06-24 17:41:09 +0200 | [diff] [blame] | 6445 | TestAllocationVersusResourceSize();
|
Adam Sawicki | f297534 | 2018-10-16 13:49:02 +0200 | [diff] [blame] | 6446 | //TestGpuData(); // Not calling this because it's just testing the testing environment.
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 6447 | #if VMA_DEBUG_MARGIN
|
| 6448 | TestDebugMargin();
|
| 6449 | #else
|
| 6450 | TestPool_SameSize();
|
Adam Sawicki | ddcbf8c | 2019-11-22 15:22:42 +0100 | [diff] [blame] | 6451 | TestPool_MinBlockCount();
|
Adam Sawicki | 212a4a6 | 2018-06-14 15:44:45 +0200 | [diff] [blame] | 6452 | TestHeapSizeLimit();
|
| 6453 | #endif
|
Adam Sawicki | e44c626 | 2018-06-15 14:30:39 +0200 | [diff] [blame] | 6454 | #if VMA_DEBUG_INITIALIZE_ALLOCATIONS
|
| 6455 | TestAllocationsInitialization();
|
| 6456 | #endif
|
Adam Sawicki | efa88c4 | 2019-11-18 16:33:56 +0100 | [diff] [blame] | 6457 | TestMemoryUsage();
|
Adam Sawicki | 5088250 | 2020-02-07 16:51:31 +0100 | [diff] [blame] | 6458 | TestDeviceCoherentMemory();
|
Adam Sawicki | 40ffe98 | 2019-10-11 15:56:02 +0200 | [diff] [blame] | 6459 | TestBudget();
|
Adam Sawicki | 0620c8e | 2020-08-18 16:43:44 +0200 | [diff] [blame] | 6460 | TestAliasing();
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6461 | TestMapping();
|
Adam Sawicki | daa6a55 | 2019-06-25 15:26:37 +0200 | [diff] [blame] | 6462 | TestDeviceLocalMapped();
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6463 | TestMappingMultithreaded();
|
Adam Sawicki | 0876c0d | 2018-06-20 15:18:11 +0200 | [diff] [blame] | 6464 | TestLinearAllocator();
|
Adam Sawicki | 8cfe05f | 2018-08-22 16:48:17 +0200 | [diff] [blame] | 6465 | ManuallyTestLinearAllocator();
|
Adam Sawicki | 70a683e | 2018-08-24 15:36:32 +0200 | [diff] [blame] | 6466 | TestLinearAllocatorMultiBlock();
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 6467 |
|
Adam Sawicki | 4338f66 | 2018-09-07 14:12:37 +0200 | [diff] [blame] | 6468 | BasicTestBuddyAllocator();
|
Adam Sawicki | 2e4d3ef | 2018-10-03 15:48:17 +0200 | [diff] [blame] | 6469 | BasicTestAllocatePages();
|
Adam Sawicki | 4338f66 | 2018-09-07 14:12:37 +0200 | [diff] [blame] | 6470 |
|
Adam Sawicki | e73e988 | 2020-03-20 18:05:42 +0100 | [diff] [blame] | 6471 | if(g_BufferDeviceAddressEnabled)
|
| 6472 | TestBufferDeviceAddress();
|
Adam Sawicki | f201205 | 2021-01-11 18:04:42 +0100 | [diff] [blame] | 6473 | if(VK_EXT_memory_priority_enabled)
|
| 6474 | TestMemoryPriority();
|
Adam Sawicki | e73e988 | 2020-03-20 18:05:42 +0100 | [diff] [blame] | 6475 |
|
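| | // Benchmark the different allocation algorithms and write the results to a separate CSV file.
|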
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 6476 | {
|
| 6477 | FILE* file;
|
Adam Sawicki | c6432d1 | 2018-09-21 16:44:16 +0200 | [diff] [blame] | 6478 | fopen_s(&file, "Algorithms.csv", "w");
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 6479 | assert(file != NULL);
|
Adam Sawicki | 8092715 | 2018-09-07 17:27:23 +0200 | [diff] [blame] | 6480 | BenchmarkAlgorithms(file);
|
Adam Sawicki | 33d2ce7 | 2018-08-27 13:59:13 +0200 | [diff] [blame] | 6481 | fclose(file);
|
| 6482 | }
|
| 6483 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6484 | TestDefragmentationSimple();
|
| 6485 | TestDefragmentationFull();
|
Adam Sawicki | 52076eb | 2018-11-22 16:14:50 +0100 | [diff] [blame] | 6486 | TestDefragmentationWholePool();
|
Adam Sawicki | 9a4f508 | 2018-11-23 17:26:05 +0100 | [diff] [blame] | 6487 | TestDefragmentationGpu();
|
Adam Sawicki | a52012d | 2019-12-23 15:28:51 +0100 | [diff] [blame] | 6488 | TestDefragmentationIncrementalBasic();
|
| 6489 | TestDefragmentationIncrementalComplex();
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6490 |
|
| 6491 | // # Detailed tests
|
| 6492 | FILE* file;
|
| 6493 | fopen_s(&file, "Results.csv", "w");
|
| 6494 | assert(file != NULL);
|
| 6495 |
|
| 6496 | WriteMainTestResultHeader(file);
|
| 6497 | PerformMainTests(file);
|
| 6498 | //PerformCustomMainTest(file);
|
| 6499 |
|
| 6500 | WritePoolTestResultHeader(file);
|
| 6501 | PerformPoolTests(file);
|
| 6502 | //PerformCustomPoolTest(file);
|
| 6503 |
|
| 6504 | fclose(file);
|
Adam Sawicki | 4ac8ff8 | 2019-11-18 14:47:33 +0100 | [diff] [blame] | 6505 |
|
Adam Sawicki | b8333fb | 2018-03-13 16:15:53 +0100 | [diff] [blame] | 6506 | wprintf(L"Done.\n");
|
| 6507 | }
|
| 6508 |
|
Adam Sawicki | f1a793c | 2018-03-13 15:42:22 +0100 | [diff] [blame] | 6509 | #endif // #ifdef _WIN32
|