//
// Copyright (c) 2017 Advanced Micro Devices, Inc. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//

#define NOMINMAX
#define WIN32_LEAN_AND_MEAN
#include <Windows.h>

#define VK_USE_PLATFORM_WIN32_KHR
#include <vulkan/vulkan.h>

#pragma warning(push, 4)
#pragma warning(disable: 4127) // warning C4127: conditional expression is constant
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"
#pragma warning(pop)

#define MATHFU_COMPILE_WITHOUT_SIMD_SUPPORT
#include <mathfu/glsl_mappings.h>
#include <mathfu/constants.h>

#include <fstream>
#include <vector>
#include <string>
#include <memory>
#include <algorithm>
#include <numeric>
#include <array>
#include <type_traits>
#include <utility>

#include <cmath>
#include <cassert>
#include <cstdlib>
#include <cstdio>

#define ERR_GUARD_VULKAN(Expr) do { VkResult res__ = (Expr); if (res__ < 0) assert(0); } while(0)

static const char* const SHADER_PATH1 = "./";
static const char* const SHADER_PATH2 = "../bin/";
static const wchar_t* const WINDOW_CLASS_NAME = L"VULKAN_MEMORY_ALLOCATOR_SAMPLE";
static const char* const VALIDATION_LAYER_NAME = "VK_LAYER_LUNARG_standard_validation";
static const char* const APP_TITLE_A = "Vulkan Memory Allocator Sample 1.0";
static const wchar_t* const APP_TITLE_W = L"Vulkan Memory Allocator Sample 1.0";

static const bool VSYNC = true;
static const uint32_t COMMAND_BUFFER_COUNT = 2;

static bool g_EnableValidationLayer = true;

static HINSTANCE g_hAppInstance;
static HWND g_hWnd;
static LONG g_SizeX = 1280, g_SizeY = 720;
static VkInstance g_hVulkanInstance;
static VkSurfaceKHR g_hSurface;
static VkPhysicalDevice g_hPhysicalDevice;
static VkQueue g_hPresentQueue;
static VkSurfaceFormatKHR g_SurfaceFormat;
static VkExtent2D g_Extent;
static VkSwapchainKHR g_hSwapchain;
static std::vector<VkImage> g_SwapchainImages;
static std::vector<VkImageView> g_SwapchainImageViews;
static std::vector<VkFramebuffer> g_Framebuffers;
static VkCommandPool g_hCommandPool;
static VkCommandBuffer g_MainCommandBuffers[COMMAND_BUFFER_COUNT];
static VkFence g_MainCommandBufferExecutedFances[COMMAND_BUFFER_COUNT];
static uint32_t g_NextCommandBufferIndex;
static VkSemaphore g_hImageAvailableSemaphore;
static VkSemaphore g_hRenderFinishedSemaphore;
static uint32_t g_GraphicsQueueFamilyIndex = UINT_MAX;
static uint32_t g_PresentQueueFamilyIndex = UINT_MAX;
static VkDescriptorSetLayout g_hDescriptorSetLayout;
static VkDescriptorPool g_hDescriptorPool;
static VkDescriptorSet g_hDescriptorSet; // Automatically destroyed with g_hDescriptorPool.
static VkSampler g_hSampler;
static VkFormat g_DepthFormat;
static VkImage g_hDepthImage;
static VkImageView g_hDepthImageView;

static VkSurfaceCapabilitiesKHR g_SurfaceCapabilities;
static std::vector<VkSurfaceFormatKHR> g_SurfaceFormats;
static std::vector<VkPresentModeKHR> g_PresentModes;

static PFN_vkCreateDebugReportCallbackEXT g_pvkCreateDebugReportCallbackEXT;
static PFN_vkDebugReportMessageEXT g_pvkDebugReportMessageEXT;
static PFN_vkDestroyDebugReportCallbackEXT g_pvkDestroyDebugReportCallbackEXT;
static VkDebugReportCallbackEXT g_hCallback;

static VkDevice g_hDevice;
static VmaAllocator g_hAllocator;
static VkQueue g_hGraphicsQueue;
static VkCommandBuffer g_hTemporaryCommandBuffer;

static VkPipelineLayout g_hPipelineLayout;
static VkRenderPass g_hRenderPass;
static VkPipeline g_hPipeline;

static VkBuffer g_hVertexBuffer;
static VkBuffer g_hIndexBuffer;
static uint32_t g_VertexCount;
static uint32_t g_IndexCount;

static VkImage g_hTextureImage;
static VkImageView g_hTextureImageView;

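// Helpers that record one-off commands into g_hTemporaryCommandBuffer and submit them
// to the graphics queue, waiting for completion with vkQueueWaitIdle.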
static void BeginSingleTimeCommands()
{
    VkCommandBufferBeginInfo cmdBufBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
    cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    ERR_GUARD_VULKAN( vkBeginCommandBuffer(g_hTemporaryCommandBuffer, &cmdBufBeginInfo) );
}

static void EndSingleTimeCommands()
{
    ERR_GUARD_VULKAN( vkEndCommandBuffer(g_hTemporaryCommandBuffer) );

    VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &g_hTemporaryCommandBuffer;

    ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, VK_NULL_HANDLE) );
    ERR_GUARD_VULKAN( vkQueueWaitIdle(g_hGraphicsQueue) );
}

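// Reads a whole SPIR-V binary into `out`, trying SHADER_PATH1 first and then SHADER_PATH2.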
static void LoadShader(std::vector<char>& out, const char* fileName)
{
    std::ifstream file(std::string(SHADER_PATH1) + fileName, std::ios::ate | std::ios::binary);
    if(file.is_open() == false)
        file.open(std::string(SHADER_PATH2) + fileName, std::ios::ate | std::ios::binary);
    assert(file.is_open());
    size_t fileSize = (size_t)file.tellg();
    if(fileSize > 0)
    {
        out.resize(fileSize);
        file.seekg(0);
        file.read(out.data(), fileSize);
        file.close();
    }
    else
        out.clear();
}

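// VK_EXT_debug_report callback: prints every message to stdout and additionally echoes
// warnings and errors to the debugger output window.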
VKAPI_ATTR VkBool32 VKAPI_CALL MyDebugReportCallback(
    VkDebugReportFlagsEXT flags,
    VkDebugReportObjectTypeEXT objectType,
    uint64_t object,
    size_t location,
    int32_t messageCode,
    const char* pLayerPrefix,
    const char* pMessage,
    void* pUserData)
{
    printf("%s \xBA %s\n", pLayerPrefix, pMessage);

    if((flags == VK_DEBUG_REPORT_WARNING_BIT_EXT) ||
        (flags == VK_DEBUG_REPORT_ERROR_BIT_EXT))
    {
        OutputDebugStringA(pMessage);
        OutputDebugStringA("\n");
    }

    return VK_FALSE;
}

static VkSurfaceFormatKHR ChooseSurfaceFormat()
{
    assert(!g_SurfaceFormats.empty());

    if((g_SurfaceFormats.size() == 1) && (g_SurfaceFormats[0].format == VK_FORMAT_UNDEFINED))
    {
        VkSurfaceFormatKHR result = { VK_FORMAT_B8G8R8A8_UNORM, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
        return result;
    }

    for(const auto& format : g_SurfaceFormats)
    {
        if((format.format == VK_FORMAT_B8G8R8A8_UNORM) &&
            (format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR))
        {
            return format;
        }
    }

    return g_SurfaceFormats[0];
}

VkPresentModeKHR ChooseSwapPresentMode()
{
    VkPresentModeKHR preferredMode = VSYNC ? VK_PRESENT_MODE_MAILBOX_KHR : VK_PRESENT_MODE_IMMEDIATE_KHR;

    if(std::find(g_PresentModes.begin(), g_PresentModes.end(), preferredMode) !=
        g_PresentModes.end())
    {
        return preferredMode;
    }

    return VK_PRESENT_MODE_FIFO_KHR;
}

static VkExtent2D ChooseSwapExtent()
{
    if(g_SurfaceCapabilities.currentExtent.width != UINT_MAX)
        return g_SurfaceCapabilities.currentExtent;

    VkExtent2D result = {
        std::max(g_SurfaceCapabilities.minImageExtent.width,
            std::min(g_SurfaceCapabilities.maxImageExtent.width, (uint32_t)g_SizeX)),
        std::max(g_SurfaceCapabilities.minImageExtent.height,
            std::min(g_SurfaceCapabilities.maxImageExtent.height, (uint32_t)g_SizeY)) };
    return result;
}

struct Vertex
{
    float pos[3];
    float color[3];
    float texCoord[2];
};

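// Creates the cube vertex and index buffers: data is first written to CPU_ONLY staging
// buffers, then copied into GPU_ONLY device-local buffers with a one-time command buffer.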
static void CreateMesh()
{
    assert(g_hAllocator);

    static Vertex vertices[] = {
        // -X
        { { -1.f, -1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 0.f} },
        { { -1.f, -1.f, 1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 0.f} },
        { { -1.f, 1.f, -1.f}, {1.0f, 0.0f, 0.0f}, {0.f, 1.f} },
        { { -1.f, 1.f, 1.f}, {1.0f, 0.0f, 0.0f}, {1.f, 1.f} },
        // +X
        { { 1.f, -1.f, 1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { { 1.f, -1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { { 1.f, 1.f, 1.f}, {0.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { { 1.f, 1.f, -1.f}, {0.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Z
        { { 1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { {-1.f, -1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { { 1.f, 1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { {-1.f, 1.f, -1.f}, {0.0f, 0.0f, 1.0f}, {1.f, 1.f} },
        // +Z
        { {-1.f, -1.f, 1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 0.f} },
        { { 1.f, -1.f, 1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 0.f} },
        { {-1.f, 1.f, 1.f}, {1.0f, 1.0f, 0.0f}, {0.f, 1.f} },
        { { 1.f, 1.f, 1.f}, {1.0f, 1.0f, 0.0f}, {1.f, 1.f} },
        // -Y
        { {-1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 0.f} },
        { { 1.f, -1.f, -1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 0.f} },
        { {-1.f, -1.f, 1.f}, {0.0f, 1.0f, 1.0f}, {0.f, 1.f} },
        { { 1.f, -1.f, 1.f}, {0.0f, 1.0f, 1.0f}, {1.f, 1.f} },
        // +Y
        { { 1.f, 1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 0.f} },
        { {-1.f, 1.f, -1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 0.f} },
        { { 1.f, 1.f, 1.f}, {1.0f, 0.0f, 1.0f}, {0.f, 1.f} },
        { {-1.f, 1.f, 1.f}, {1.0f, 0.0f, 1.0f}, {1.f, 1.f} },
    };
    static uint16_t indices[] = {
        0, 1, 2, 3, USHRT_MAX,
        4, 5, 6, 7, USHRT_MAX,
        8, 9, 10, 11, USHRT_MAX,
        12, 13, 14, 15, USHRT_MAX,
        16, 17, 18, 19, USHRT_MAX,
        20, 21, 22, 23, USHRT_MAX,
    };

    size_t vertexBufferSize = sizeof(Vertex) * _countof(vertices);
    size_t indexBufferSize = sizeof(uint16_t) * _countof(indices);
    g_IndexCount = (uint32_t)_countof(indices);

    // Create vertex buffer

    VkBufferCreateInfo vbInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    vbInfo.size = vertexBufferSize;
    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    vbInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    VmaMemoryRequirements vbMemReq = {};
    vbMemReq.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    VkMappedMemoryRange stagingVertexBufferMem;
    VkBuffer stagingVertexBuffer = VK_NULL_HANDLE;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbMemReq, &stagingVertexBuffer, &stagingVertexBufferMem, nullptr) );

    void* pVbData = nullptr;
    ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, &stagingVertexBufferMem, &pVbData) );
    memcpy(pVbData, vertices, vertexBufferSize);
    vmaUnmapMemory(g_hAllocator, &stagingVertexBufferMem);

    // No need to flush stagingVertexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    vbInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    vbMemReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &vbInfo, &vbMemReq, &g_hVertexBuffer, nullptr, nullptr) );

    // Create index buffer

    VkBufferCreateInfo ibInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    ibInfo.size = indexBufferSize;
    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    ibInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    VmaMemoryRequirements ibMemReq = {};
    ibMemReq.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    VkMappedMemoryRange stagingIndexBufferMem;
    VkBuffer stagingIndexBuffer = VK_NULL_HANDLE;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibMemReq, &stagingIndexBuffer, &stagingIndexBufferMem, nullptr) );

    void* pIbData = nullptr;
    ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, &stagingIndexBufferMem, &pIbData) );
    memcpy(pIbData, indices, indexBufferSize);
    vmaUnmapMemory(g_hAllocator, &stagingIndexBufferMem);

    // No need to flush stagingIndexBuffer memory because CPU_ONLY memory is always HOST_COHERENT.

    ibInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    ibMemReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    ERR_GUARD_VULKAN( vmaCreateBuffer(g_hAllocator, &ibInfo, &ibMemReq, &g_hIndexBuffer, nullptr, nullptr) );

    // Copy buffers

    BeginSingleTimeCommands();

    VkBufferCopy vbCopyRegion = {};
    vbCopyRegion.srcOffset = 0;
    vbCopyRegion.dstOffset = 0;
    vbCopyRegion.size = vbInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingVertexBuffer, g_hVertexBuffer, 1, &vbCopyRegion);

    VkBufferCopy ibCopyRegion = {};
    ibCopyRegion.srcOffset = 0;
    ibCopyRegion.dstOffset = 0;
    ibCopyRegion.size = ibInfo.size;
    vkCmdCopyBuffer(g_hTemporaryCommandBuffer, stagingIndexBuffer, g_hIndexBuffer, 1, &ibCopyRegion);

    EndSingleTimeCommands();

    vmaDestroyBuffer(g_hAllocator, stagingIndexBuffer);
    vmaDestroyBuffer(g_hAllocator, stagingVertexBuffer);
}

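// Copies one mip level of a color image, assuming src/dst are already in
// TRANSFER_SRC_OPTIMAL / TRANSFER_DST_OPTIMAL layout.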
static void CopyImage(VkImage srcImage, VkImage dstImage, uint32_t width, uint32_t height, uint32_t mipLevel)
{
    BeginSingleTimeCommands();

    VkImageCopy imageCopy = {};
    imageCopy.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageCopy.srcSubresource.baseArrayLayer = 0;
    imageCopy.srcSubresource.mipLevel = mipLevel;
    imageCopy.srcSubresource.layerCount = 1;
    imageCopy.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageCopy.dstSubresource.baseArrayLayer = 0;
    imageCopy.dstSubresource.mipLevel = mipLevel;
    imageCopy.dstSubresource.layerCount = 1;
    imageCopy.srcOffset.x = 0;
    imageCopy.srcOffset.y = 0;
    imageCopy.srcOffset.z = 0;
    imageCopy.dstOffset.x = 0;
    imageCopy.dstOffset.y = 0;
    imageCopy.dstOffset.z = 0;
    imageCopy.extent.width = width;
    imageCopy.extent.height = height;
    imageCopy.extent.depth = 1;
    vkCmdCopyImage(
        g_hTemporaryCommandBuffer,
        srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1, &imageCopy);

    EndSingleTimeCommands();
}

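// Records and submits an image memory barrier that transitions the whole image between the
// layout pairs used in this sample; unsupported combinations hit the assert below.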
static void TransitionImageLayout(VkImage image, VkFormat format, uint32_t mipLevelCount, VkImageLayout oldLayout, VkImageLayout newLayout)
{
    BeginSingleTimeCommands();

    VkImageMemoryBarrier imgMemBarrier = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
    imgMemBarrier.oldLayout = oldLayout;
    imgMemBarrier.newLayout = newLayout;
    imgMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imgMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    imgMemBarrier.image = image;
    imgMemBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imgMemBarrier.subresourceRange.baseMipLevel = 0;
    imgMemBarrier.subresourceRange.levelCount = mipLevelCount;
    imgMemBarrier.subresourceRange.baseArrayLayer = 0;
    imgMemBarrier.subresourceRange.layerCount = 1;

    if((oldLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) &&
        (newLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL))
    {
        imgMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
        imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    }
    else if((oldLayout == VK_IMAGE_LAYOUT_PREINITIALIZED) &&
        (newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
    {
        imgMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
        imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    }
    else if((oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) &&
        (newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL))
    {
        imgMemBarrier.srcAccessMask = 0;
        imgMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    }
    else if((oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) &&
        (newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
    {
        imgMemBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        imgMemBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
    }
    else if ((oldLayout == VK_IMAGE_LAYOUT_UNDEFINED) &&
        (newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL))
    {
        imgMemBarrier.srcAccessMask = 0;
        imgMemBarrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    }
    else
        assert(0);

    if (newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
    {
        imgMemBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        if ((format == VK_FORMAT_D16_UNORM_S8_UINT) ||
            (format == VK_FORMAT_D24_UNORM_S8_UINT) ||
            (format == VK_FORMAT_D32_SFLOAT_S8_UINT))
        {
            imgMemBarrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    }

    vkCmdPipelineBarrier(
        g_hTemporaryCommandBuffer,
        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
        0,
        0, nullptr,
        0, nullptr,
        1, &imgMemBarrier);

    EndSingleTimeCommands();
}

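// Generates a procedural test texture: pixels are written to a linear-tiled CPU_ONLY
// staging image, then copied into an optimal-tiled, sampled GPU_ONLY image.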
static void CreateTexture(uint32_t sizeX, uint32_t sizeY)
{
    // Create Image

    const VkDeviceSize imageSize = sizeX * sizeY * 4;

    VkImageCreateInfo stagingImageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    stagingImageInfo.imageType = VK_IMAGE_TYPE_2D;
    stagingImageInfo.extent.width = sizeX;
    stagingImageInfo.extent.height = sizeY;
    stagingImageInfo.extent.depth = 1;
    stagingImageInfo.mipLevels = 1;
    stagingImageInfo.arrayLayers = 1;
    stagingImageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    stagingImageInfo.tiling = VK_IMAGE_TILING_LINEAR;
    stagingImageInfo.initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED;
    stagingImageInfo.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    stagingImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    stagingImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    stagingImageInfo.flags = 0;
    VmaMemoryRequirements stagingImageMemReq = {};
    stagingImageMemReq.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    VkImage stagingImage = VK_NULL_HANDLE;
    VkMappedMemoryRange stagingImageMem;
    ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &stagingImageInfo, &stagingImageMemReq, &stagingImage, &stagingImageMem, nullptr) );

    char* pImageData = nullptr;
    ERR_GUARD_VULKAN( vmaMapMemory(g_hAllocator, &stagingImageMem, (void**)&pImageData) );

    VkImageSubresource imageSubresource = {};
    imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    imageSubresource.mipLevel = 0;
    imageSubresource.arrayLayer = 0;

    VkSubresourceLayout imageLayout = {};
    vkGetImageSubresourceLayout(g_hDevice, stagingImage, &imageSubresource, &imageLayout);

    char* const pMipLevelData = pImageData + imageLayout.offset;
    uint8_t* pRowData = (uint8_t*)pMipLevelData;
    for(uint32_t y = 0; y < sizeY; ++y)
    {
        uint32_t* pPixelData = (uint32_t*)pRowData;
        for(uint32_t x = 0; x < sizeX; ++x)
        {
            *pPixelData =
                ((x & 0x18) == 0x08 ? 0x000000FF : 0x00000000) |
                ((x & 0x18) == 0x10 ? 0x0000FFFF : 0x00000000) |
                ((y & 0x18) == 0x08 ? 0x0000FF00 : 0x00000000) |
                ((y & 0x18) == 0x10 ? 0x00FF0000 : 0x00000000);
            ++pPixelData;
        }
        pRowData += imageLayout.rowPitch;
    }

    vmaUnmapMemory(g_hAllocator, &stagingImageMem);

    // No need to flush stagingImage memory because CPU_ONLY memory is always HOST_COHERENT.

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.extent.width = sizeX;
    imageInfo.extent.height = sizeY;
    imageInfo.extent.depth = 1;
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageInfo.flags = 0;
    VmaMemoryRequirements imageMemReq = {};
    imageMemReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &imageInfo, &imageMemReq, &g_hTextureImage, nullptr, nullptr) );

    TransitionImageLayout(
        stagingImage,
        VK_FORMAT_R8G8B8A8_UNORM,
        1,
        VK_IMAGE_LAYOUT_PREINITIALIZED,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
    TransitionImageLayout(
        g_hTextureImage,
        VK_FORMAT_R8G8B8A8_UNORM,
        1,
        VK_IMAGE_LAYOUT_UNDEFINED,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
    CopyImage(stagingImage, g_hTextureImage, sizeX, sizeY, 0);
    TransitionImageLayout(
        g_hTextureImage,
        VK_FORMAT_R8G8B8A8_UNORM,
        1,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

    vmaDestroyImage(g_hAllocator, stagingImage);

    // Create ImageView

    VkImageViewCreateInfo textureImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    textureImageViewInfo.image = g_hTextureImage;
    textureImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
    textureImageViewInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    textureImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    textureImageViewInfo.subresourceRange.baseMipLevel = 0;
    textureImageViewInfo.subresourceRange.levelCount = 1;
    textureImageViewInfo.subresourceRange.baseArrayLayer = 0;
    textureImageViewInfo.subresourceRange.layerCount = 1;
    ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &textureImageViewInfo, nullptr, &g_hTextureImageView) );
}

struct UniformBufferObject
{
    mathfu::vec4_packed ModelViewProj[4];
};

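// Loads the VK_EXT_debug_report entry points and installs MyDebugReportCallback
// for errors, warnings and performance warnings.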
static void RegisterDebugCallbacks()
{
    g_pvkCreateDebugReportCallbackEXT =
        reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>
        (vkGetInstanceProcAddr(g_hVulkanInstance, "vkCreateDebugReportCallbackEXT"));
    g_pvkDebugReportMessageEXT =
        reinterpret_cast<PFN_vkDebugReportMessageEXT>
        (vkGetInstanceProcAddr(g_hVulkanInstance, "vkDebugReportMessageEXT"));
    g_pvkDestroyDebugReportCallbackEXT =
        reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>
        (vkGetInstanceProcAddr(g_hVulkanInstance, "vkDestroyDebugReportCallbackEXT"));
    assert(g_pvkCreateDebugReportCallbackEXT);
    assert(g_pvkDebugReportMessageEXT);
    assert(g_pvkDestroyDebugReportCallbackEXT);

    VkDebugReportCallbackCreateInfoEXT callbackCreateInfo;
    callbackCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
    callbackCreateInfo.pNext = nullptr;
    callbackCreateInfo.flags = //VK_DEBUG_REPORT_INFORMATION_BIT_EXT |
        VK_DEBUG_REPORT_ERROR_BIT_EXT |
        VK_DEBUG_REPORT_WARNING_BIT_EXT |
        VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT /*|
        VK_DEBUG_REPORT_DEBUG_BIT_EXT*/;
    callbackCreateInfo.pfnCallback = &MyDebugReportCallback;
    callbackCreateInfo.pUserData = nullptr;

    ERR_GUARD_VULKAN( g_pvkCreateDebugReportCallbackEXT(g_hVulkanInstance, &callbackCreateInfo, nullptr, &g_hCallback) );
}

static bool IsLayerSupported(const VkLayerProperties* pProps, size_t propCount, const char* pLayerName)
{
    const VkLayerProperties* propsEnd = pProps + propCount;
    return std::find_if(
        pProps,
        propsEnd,
        [pLayerName](const VkLayerProperties& prop) -> bool {
            return strcmp(pLayerName, prop.layerName) == 0;
        }) != propsEnd;
}

static VkFormat FindSupportedFormat(
    const std::vector<VkFormat>& candidates,
    VkImageTiling tiling,
    VkFormatFeatureFlags features)
{
    for (VkFormat format : candidates)
    {
        VkFormatProperties props;
        vkGetPhysicalDeviceFormatProperties(g_hPhysicalDevice, format, &props);

        if ((tiling == VK_IMAGE_TILING_LINEAR) &&
            ((props.linearTilingFeatures & features) == features))
        {
            return format;
        }
        else if ((tiling == VK_IMAGE_TILING_OPTIMAL) &&
            ((props.optimalTilingFeatures & features) == features))
        {
            return format;
        }
    }
    return VK_FORMAT_UNDEFINED;
}

static VkFormat FindDepthFormat()
{
    std::vector<VkFormat> formats;
    formats.push_back(VK_FORMAT_D32_SFLOAT);
    formats.push_back(VK_FORMAT_D32_SFLOAT_S8_UINT);
    formats.push_back(VK_FORMAT_D24_UNORM_S8_UINT);

    return FindSupportedFormat(
        formats,
        VK_IMAGE_TILING_OPTIMAL,
        VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT);
}

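// (Re)creates everything that depends on the window size: swapchain, image views,
// depth buffer, pipeline layout, render pass, graphics pipeline, framebuffers and semaphores.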
static void CreateSwapchain()
{
    // Query surface formats.

    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceCapabilitiesKHR(g_hPhysicalDevice, g_hSurface, &g_SurfaceCapabilities) );

    uint32_t formatCount = 0;
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, nullptr) );
    g_SurfaceFormats.resize(formatCount);
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfaceFormatsKHR(g_hPhysicalDevice, g_hSurface, &formatCount, g_SurfaceFormats.data()) );

    uint32_t presentModeCount = 0;
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, nullptr) );
    g_PresentModes.resize(presentModeCount);
    ERR_GUARD_VULKAN( vkGetPhysicalDeviceSurfacePresentModesKHR(g_hPhysicalDevice, g_hSurface, &presentModeCount, g_PresentModes.data()) );

    // Create swap chain

    g_SurfaceFormat = ChooseSurfaceFormat();
    VkPresentModeKHR presentMode = ChooseSwapPresentMode();
    g_Extent = ChooseSwapExtent();

    uint32_t imageCount = g_SurfaceCapabilities.minImageCount + 1;
    if((g_SurfaceCapabilities.maxImageCount > 0) &&
        (imageCount > g_SurfaceCapabilities.maxImageCount))
    {
        imageCount = g_SurfaceCapabilities.maxImageCount;
    }

    VkSwapchainCreateInfoKHR swapChainInfo = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
    swapChainInfo.surface = g_hSurface;
    swapChainInfo.minImageCount = imageCount;
    swapChainInfo.imageFormat = g_SurfaceFormat.format;
    swapChainInfo.imageColorSpace = g_SurfaceFormat.colorSpace;
    swapChainInfo.imageExtent = g_Extent;
    swapChainInfo.imageArrayLayers = 1;
    swapChainInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    swapChainInfo.preTransform = g_SurfaceCapabilities.currentTransform;
    swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
    swapChainInfo.presentMode = presentMode;
    swapChainInfo.clipped = VK_TRUE;
    swapChainInfo.oldSwapchain = g_hSwapchain;

    uint32_t queueFamilyIndices[] = { g_GraphicsQueueFamilyIndex, g_PresentQueueFamilyIndex };
    if(g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex)
    {
        swapChainInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
        swapChainInfo.queueFamilyIndexCount = 2;
        swapChainInfo.pQueueFamilyIndices = queueFamilyIndices;
    }
    else
    {
        swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    }

    VkSwapchainKHR hNewSwapchain = VK_NULL_HANDLE;
    ERR_GUARD_VULKAN( vkCreateSwapchainKHR(g_hDevice, &swapChainInfo, nullptr, &hNewSwapchain) );
    if(g_hSwapchain != VK_NULL_HANDLE)
        vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, nullptr);
    g_hSwapchain = hNewSwapchain;

    // Retrieve swapchain images.

    uint32_t swapchainImageCount = 0;
    ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, nullptr) );
    g_SwapchainImages.resize(swapchainImageCount);
    ERR_GUARD_VULKAN( vkGetSwapchainImagesKHR(g_hDevice, g_hSwapchain, &swapchainImageCount, g_SwapchainImages.data()) );

    // Create swapchain image views.

    for(size_t i = g_SwapchainImageViews.size(); i--; )
        vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], nullptr);
    g_SwapchainImageViews.clear();

    VkImageViewCreateInfo swapchainImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    g_SwapchainImageViews.resize(swapchainImageCount);
    for(uint32_t i = 0; i < swapchainImageCount; ++i)
    {
        swapchainImageViewInfo.image = g_SwapchainImages[i];
        swapchainImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        swapchainImageViewInfo.format = g_SurfaceFormat.format;
        swapchainImageViewInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
        swapchainImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        swapchainImageViewInfo.subresourceRange.baseMipLevel = 0;
        swapchainImageViewInfo.subresourceRange.levelCount = 1;
        swapchainImageViewInfo.subresourceRange.baseArrayLayer = 0;
        swapchainImageViewInfo.subresourceRange.layerCount = 1;
        ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &swapchainImageViewInfo, nullptr, &g_SwapchainImageViews[i]) );
    }

    // Create depth buffer

    g_DepthFormat = FindDepthFormat();
    assert(g_DepthFormat != VK_FORMAT_UNDEFINED);

    VkImageCreateInfo depthImageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    depthImageInfo.imageType = VK_IMAGE_TYPE_2D;
    depthImageInfo.extent.width = g_Extent.width;
    depthImageInfo.extent.height = g_Extent.height;
    depthImageInfo.extent.depth = 1;
    depthImageInfo.mipLevels = 1;
    depthImageInfo.arrayLayers = 1;
    depthImageInfo.format = g_DepthFormat;
    depthImageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    depthImageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    depthImageInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    depthImageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    depthImageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    depthImageInfo.flags = 0;

    VmaMemoryRequirements depthImageMemReq = {};
    depthImageMemReq.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    ERR_GUARD_VULKAN( vmaCreateImage(g_hAllocator, &depthImageInfo, &depthImageMemReq, &g_hDepthImage, nullptr, nullptr) );

    VkImageViewCreateInfo depthImageViewInfo = { VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO };
    depthImageViewInfo.image = g_hDepthImage;
    depthImageViewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
    depthImageViewInfo.format = g_DepthFormat;
    depthImageViewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    depthImageViewInfo.subresourceRange.baseMipLevel = 0;
    depthImageViewInfo.subresourceRange.levelCount = 1;
    depthImageViewInfo.subresourceRange.baseArrayLayer = 0;
    depthImageViewInfo.subresourceRange.layerCount = 1;

    ERR_GUARD_VULKAN( vkCreateImageView(g_hDevice, &depthImageViewInfo, nullptr, &g_hDepthImageView) );

    TransitionImageLayout(
        g_hDepthImage,
        g_DepthFormat,
        1,
        VK_IMAGE_LAYOUT_UNDEFINED,
        VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);

    // Create pipeline layout
    {
        if(g_hPipelineLayout != VK_NULL_HANDLE)
        {
            vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, nullptr);
            g_hPipelineLayout = VK_NULL_HANDLE;
        }

        VkPushConstantRange pushConstantRanges[1];
        ZeroMemory(&pushConstantRanges, sizeof pushConstantRanges);
        pushConstantRanges[0].offset = 0;
        pushConstantRanges[0].size = sizeof(UniformBufferObject);
        pushConstantRanges[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;

        VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
        VkPipelineLayoutCreateInfo pipelineLayoutInfo = { VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO };
        pipelineLayoutInfo.setLayoutCount = 1;
        pipelineLayoutInfo.pSetLayouts = descriptorSetLayouts;
        pipelineLayoutInfo.pushConstantRangeCount = 1;
        pipelineLayoutInfo.pPushConstantRanges = pushConstantRanges;
        ERR_GUARD_VULKAN( vkCreatePipelineLayout(g_hDevice, &pipelineLayoutInfo, nullptr, &g_hPipelineLayout) );
    }

    // Create render pass
    {
        if(g_hRenderPass != VK_NULL_HANDLE)
        {
            vkDestroyRenderPass(g_hDevice, g_hRenderPass, nullptr);
            g_hRenderPass = VK_NULL_HANDLE;
        }

        VkAttachmentDescription attachments[2];
        ZeroMemory(attachments, sizeof(attachments));

        attachments[0].format = g_SurfaceFormat.format;
        attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[0].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[0].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;

        attachments[1].format = g_DepthFormat;
        attachments[1].samples = VK_SAMPLE_COUNT_1_BIT;
        attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachments[1].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkAttachmentReference colorAttachmentRef = {};
        colorAttachmentRef.attachment = 0;
        colorAttachmentRef.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkAttachmentReference depthStencilAttachmentRef = {};
        depthStencilAttachmentRef.attachment = 1;
        depthStencilAttachmentRef.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkSubpassDescription subpassDesc = {};
        subpassDesc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpassDesc.colorAttachmentCount = 1;
        subpassDesc.pColorAttachments = &colorAttachmentRef;
        subpassDesc.pDepthStencilAttachment = &depthStencilAttachmentRef;

        VkSubpassDependency subpassDependency = {};
        subpassDependency.srcSubpass = VK_SUBPASS_EXTERNAL;
        subpassDependency.dstSubpass = 0;
        subpassDependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        subpassDependency.dstStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
        subpassDependency.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
        subpassDependency.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT |
            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;

        VkRenderPassCreateInfo renderPassInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO };
        renderPassInfo.attachmentCount = (uint32_t)_countof(attachments);
        renderPassInfo.pAttachments = attachments;
        renderPassInfo.subpassCount = 1;
        renderPassInfo.pSubpasses = &subpassDesc;
        renderPassInfo.dependencyCount = 1;
        renderPassInfo.pDependencies = &subpassDependency;
        ERR_GUARD_VULKAN( vkCreateRenderPass(g_hDevice, &renderPassInfo, nullptr, &g_hRenderPass) );
    }

    // Create pipeline
    {
        std::vector<char> vertShaderCode;
        LoadShader(vertShaderCode, "Shader.vert.spv");
        VkShaderModuleCreateInfo shaderModuleInfo = { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
        shaderModuleInfo.codeSize = vertShaderCode.size();
        shaderModuleInfo.pCode = (const uint32_t*)vertShaderCode.data();
        VkShaderModule hVertShaderModule = VK_NULL_HANDLE;
        ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, nullptr, &hVertShaderModule) );

        std::vector<char> hFragShaderCode;
        LoadShader(hFragShaderCode, "Shader.frag.spv");
        shaderModuleInfo.codeSize = hFragShaderCode.size();
        shaderModuleInfo.pCode = (const uint32_t*)hFragShaderCode.data();
        VkShaderModule fragShaderModule = VK_NULL_HANDLE;
        ERR_GUARD_VULKAN( vkCreateShaderModule(g_hDevice, &shaderModuleInfo, nullptr, &fragShaderModule) );

        VkPipelineShaderStageCreateInfo vertPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
        vertPipelineShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT;
        vertPipelineShaderStageInfo.module = hVertShaderModule;
        vertPipelineShaderStageInfo.pName = "main";

        VkPipelineShaderStageCreateInfo fragPipelineShaderStageInfo = { VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO };
        fragPipelineShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
        fragPipelineShaderStageInfo.module = fragShaderModule;
        fragPipelineShaderStageInfo.pName = "main";

        VkPipelineShaderStageCreateInfo pipelineShaderStageInfos[] = {
            vertPipelineShaderStageInfo,
            fragPipelineShaderStageInfo
        };

        VkVertexInputBindingDescription bindingDescription = {};
        bindingDescription.binding = 0;
        bindingDescription.stride = sizeof(Vertex);
        bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

        VkVertexInputAttributeDescription attributeDescriptions[3];
        ZeroMemory(attributeDescriptions, sizeof(attributeDescriptions));

        attributeDescriptions[0].binding = 0;
        attributeDescriptions[0].location = 0;
        attributeDescriptions[0].format = VK_FORMAT_R32G32B32_SFLOAT;
        attributeDescriptions[0].offset = offsetof(Vertex, pos);

        attributeDescriptions[1].binding = 0;
        attributeDescriptions[1].location = 1;
        attributeDescriptions[1].format = VK_FORMAT_R32G32B32_SFLOAT;
        attributeDescriptions[1].offset = offsetof(Vertex, color);

        attributeDescriptions[2].binding = 0;
        attributeDescriptions[2].location = 2;
        attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT;
        attributeDescriptions[2].offset = offsetof(Vertex, texCoord);

        VkPipelineVertexInputStateCreateInfo pipelineVertexInputStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO };
        pipelineVertexInputStateInfo.vertexBindingDescriptionCount = 1;
        pipelineVertexInputStateInfo.pVertexBindingDescriptions = &bindingDescription;
        pipelineVertexInputStateInfo.vertexAttributeDescriptionCount = _countof(attributeDescriptions);
        pipelineVertexInputStateInfo.pVertexAttributeDescriptions = attributeDescriptions;

        VkPipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO };
        pipelineInputAssemblyStateInfo.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        pipelineInputAssemblyStateInfo.primitiveRestartEnable = VK_TRUE;

        VkViewport viewport = {};
        viewport.x = 0.f;
        viewport.y = 0.f;
        viewport.width = (float)g_Extent.width;
        viewport.height = (float)g_Extent.height;
        viewport.minDepth = 0.f;
        viewport.maxDepth = 1.f;

        VkRect2D scissor = {};
        scissor.offset.x = 0;
        scissor.offset.y = 0;
        scissor.extent = g_Extent;

        VkPipelineViewportStateCreateInfo pipelineViewportStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO };
        pipelineViewportStateInfo.viewportCount = 1;
        pipelineViewportStateInfo.pViewports = &viewport;
        pipelineViewportStateInfo.scissorCount = 1;
        pipelineViewportStateInfo.pScissors = &scissor;

        VkPipelineRasterizationStateCreateInfo pipelineRasterizationStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO };
        pipelineRasterizationStateInfo.depthClampEnable = VK_FALSE;
        pipelineRasterizationStateInfo.rasterizerDiscardEnable = VK_FALSE;
        pipelineRasterizationStateInfo.polygonMode = VK_POLYGON_MODE_FILL;
        pipelineRasterizationStateInfo.lineWidth = 1.f;
        pipelineRasterizationStateInfo.cullMode = VK_CULL_MODE_BACK_BIT;
        pipelineRasterizationStateInfo.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
        pipelineRasterizationStateInfo.depthBiasEnable = VK_FALSE;
        pipelineRasterizationStateInfo.depthBiasConstantFactor = 0.f;
        pipelineRasterizationStateInfo.depthBiasClamp = 0.f;
        pipelineRasterizationStateInfo.depthBiasSlopeFactor = 0.f;

        VkPipelineMultisampleStateCreateInfo pipelineMultisampleStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO };
        pipelineMultisampleStateInfo.sampleShadingEnable = VK_FALSE;
        pipelineMultisampleStateInfo.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
        pipelineMultisampleStateInfo.minSampleShading = 1.f;
        pipelineMultisampleStateInfo.pSampleMask = nullptr;
        pipelineMultisampleStateInfo.alphaToCoverageEnable = VK_FALSE;
        pipelineMultisampleStateInfo.alphaToOneEnable = VK_FALSE;

        VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState = {};
        pipelineColorBlendAttachmentState.colorWriteMask =
            VK_COLOR_COMPONENT_R_BIT |
            VK_COLOR_COMPONENT_G_BIT |
            VK_COLOR_COMPONENT_B_BIT |
            VK_COLOR_COMPONENT_A_BIT;
        pipelineColorBlendAttachmentState.blendEnable = VK_FALSE;
        pipelineColorBlendAttachmentState.srcColorBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
        pipelineColorBlendAttachmentState.dstColorBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
        pipelineColorBlendAttachmentState.colorBlendOp = VK_BLEND_OP_ADD; // Optional
        pipelineColorBlendAttachmentState.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; // Optional
        pipelineColorBlendAttachmentState.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; // Optional
        pipelineColorBlendAttachmentState.alphaBlendOp = VK_BLEND_OP_ADD; // Optional

        VkPipelineColorBlendStateCreateInfo pipelineColorBlendStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO };
        pipelineColorBlendStateInfo.logicOpEnable = VK_FALSE;
        pipelineColorBlendStateInfo.logicOp = VK_LOGIC_OP_COPY;
        pipelineColorBlendStateInfo.attachmentCount = 1;
        pipelineColorBlendStateInfo.pAttachments = &pipelineColorBlendAttachmentState;

        VkPipelineDepthStencilStateCreateInfo depthStencilStateInfo = { VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO };
        depthStencilStateInfo.depthTestEnable = VK_TRUE;
        depthStencilStateInfo.depthWriteEnable = VK_TRUE;
        depthStencilStateInfo.depthCompareOp = VK_COMPARE_OP_LESS;
        depthStencilStateInfo.depthBoundsTestEnable = VK_FALSE;
        depthStencilStateInfo.stencilTestEnable = VK_FALSE;

        VkGraphicsPipelineCreateInfo pipelineInfo = { VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO };
        pipelineInfo.stageCount = 2;
        pipelineInfo.pStages = pipelineShaderStageInfos;
        pipelineInfo.pVertexInputState = &pipelineVertexInputStateInfo;
        pipelineInfo.pInputAssemblyState = &pipelineInputAssemblyStateInfo;
        pipelineInfo.pViewportState = &pipelineViewportStateInfo;
        pipelineInfo.pRasterizationState = &pipelineRasterizationStateInfo;
        pipelineInfo.pMultisampleState = &pipelineMultisampleStateInfo;
        pipelineInfo.pDepthStencilState = &depthStencilStateInfo;
        pipelineInfo.pColorBlendState = &pipelineColorBlendStateInfo;
        pipelineInfo.pDynamicState = nullptr;
        pipelineInfo.layout = g_hPipelineLayout;
        pipelineInfo.renderPass = g_hRenderPass;
        pipelineInfo.subpass = 0;
        pipelineInfo.basePipelineHandle = VK_NULL_HANDLE;
        pipelineInfo.basePipelineIndex = -1;
        ERR_GUARD_VULKAN( vkCreateGraphicsPipelines(
            g_hDevice,
            VK_NULL_HANDLE,
            1,
            &pipelineInfo, nullptr,
            &g_hPipeline) );

        vkDestroyShaderModule(g_hDevice, fragShaderModule, nullptr);
        vkDestroyShaderModule(g_hDevice, hVertShaderModule, nullptr);
    }

    // Create framebuffers

    for(size_t i = g_Framebuffers.size(); i--; )
        vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], nullptr);
    g_Framebuffers.clear();

    g_Framebuffers.resize(g_SwapchainImageViews.size());
    for(size_t i = 0; i < g_SwapchainImages.size(); ++i)
    {
        VkImageView attachments[] = { g_SwapchainImageViews[i], g_hDepthImageView };

        VkFramebufferCreateInfo framebufferInfo = { VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO };
        framebufferInfo.renderPass = g_hRenderPass;
        framebufferInfo.attachmentCount = (uint32_t)_countof(attachments);
        framebufferInfo.pAttachments = attachments;
        framebufferInfo.width = g_Extent.width;
        framebufferInfo.height = g_Extent.height;
        framebufferInfo.layers = 1;
        ERR_GUARD_VULKAN( vkCreateFramebuffer(g_hDevice, &framebufferInfo, nullptr, &g_Framebuffers[i]) );
    }

    // Create semaphores

    if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, nullptr);
        g_hImageAvailableSemaphore = VK_NULL_HANDLE;
    }
    if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, nullptr);
        g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
    }

    VkSemaphoreCreateInfo semaphoreInfo = { VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
    ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, nullptr, &g_hImageAvailableSemaphore) );
    ERR_GUARD_VULKAN( vkCreateSemaphore(g_hDevice, &semaphoreInfo, nullptr, &g_hRenderFinishedSemaphore) );
}

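// Destroys the objects created by CreateSwapchain. The swapchain handle itself is destroyed
// only when destroyActualSwapchain is true; otherwise it is kept so CreateSwapchain can pass
// it as oldSwapchain.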
static void DestroySwapchain(bool destroyActualSwapchain)
{
    if(g_hImageAvailableSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hImageAvailableSemaphore, nullptr);
        g_hImageAvailableSemaphore = VK_NULL_HANDLE;
    }
    if(g_hRenderFinishedSemaphore != VK_NULL_HANDLE)
    {
        vkDestroySemaphore(g_hDevice, g_hRenderFinishedSemaphore, nullptr);
        g_hRenderFinishedSemaphore = VK_NULL_HANDLE;
    }

    for(size_t i = g_Framebuffers.size(); i--; )
        vkDestroyFramebuffer(g_hDevice, g_Framebuffers[i], nullptr);
    g_Framebuffers.clear();

    if(g_hDepthImageView != VK_NULL_HANDLE)
    {
        vkDestroyImageView(g_hDevice, g_hDepthImageView, nullptr);
        g_hDepthImageView = VK_NULL_HANDLE;
    }
    if(g_hDepthImage != VK_NULL_HANDLE)
    {
        vmaDestroyImage(g_hAllocator, g_hDepthImage);
        g_hDepthImage = VK_NULL_HANDLE;
    }

    if(g_hPipeline != VK_NULL_HANDLE)
    {
        vkDestroyPipeline(g_hDevice, g_hPipeline, nullptr);
        g_hPipeline = VK_NULL_HANDLE;
    }

    if(g_hRenderPass != VK_NULL_HANDLE)
    {
        vkDestroyRenderPass(g_hDevice, g_hRenderPass, nullptr);
        g_hRenderPass = VK_NULL_HANDLE;
    }

    if(g_hPipelineLayout != VK_NULL_HANDLE)
    {
        vkDestroyPipelineLayout(g_hDevice, g_hPipelineLayout, nullptr);
        g_hPipelineLayout = VK_NULL_HANDLE;
    }

    for(size_t i = g_SwapchainImageViews.size(); i--; )
        vkDestroyImageView(g_hDevice, g_SwapchainImageViews[i], nullptr);
    g_SwapchainImageViews.clear();

    if(destroyActualSwapchain && (g_hSwapchain != VK_NULL_HANDLE))
    {
        vkDestroySwapchainKHR(g_hDevice, g_hSwapchain, nullptr);
        g_hSwapchain = VK_NULL_HANDLE;
    }
}

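// One-time setup: Vulkan instance, surface, physical and logical device, VMA allocator,
// queues, command buffers, sampler, texture, mesh, descriptors, and finally the swapchain.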
static void InitializeApplication()
{
    uint32_t instanceLayerPropCount = 0;
    ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, nullptr) );
    std::vector<VkLayerProperties> instanceLayerProps(instanceLayerPropCount);
    if(instanceLayerPropCount > 0)
    {
        ERR_GUARD_VULKAN( vkEnumerateInstanceLayerProperties(&instanceLayerPropCount, instanceLayerProps.data()) );
    }

    if(g_EnableValidationLayer == true)
    {
        if(IsLayerSupported(instanceLayerProps.data(), instanceLayerProps.size(), VALIDATION_LAYER_NAME) == false)
        {
            printf("Layer \"%s\" not supported.", VALIDATION_LAYER_NAME);
            g_EnableValidationLayer = false;
        }
    }

    std::vector<const char*> instanceExtensions;
    instanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
    instanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);

    std::vector<const char*> instanceLayers;
    if(g_EnableValidationLayer == true)
    {
        instanceLayers.push_back(VALIDATION_LAYER_NAME);
        instanceExtensions.push_back("VK_EXT_debug_report");
    }

    VkApplicationInfo appInfo = { VK_STRUCTURE_TYPE_APPLICATION_INFO };
    appInfo.pApplicationName = APP_TITLE_A;
    appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.pEngineName = "Adam Sawicki Engine";
    appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.apiVersion = VK_API_VERSION_1_0;

    VkInstanceCreateInfo instInfo = { VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
    instInfo.pApplicationInfo = &appInfo;
    instInfo.enabledExtensionCount = static_cast<uint32_t>(instanceExtensions.size());
    instInfo.ppEnabledExtensionNames = instanceExtensions.data();
    instInfo.enabledLayerCount = static_cast<uint32_t>(instanceLayers.size());
    instInfo.ppEnabledLayerNames = instanceLayers.data();

    ERR_GUARD_VULKAN( vkCreateInstance(&instInfo, NULL, &g_hVulkanInstance) );

    // Create VkSurfaceKHR.
    VkWin32SurfaceCreateInfoKHR surfaceInfo = { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
    surfaceInfo.hinstance = g_hAppInstance;
    surfaceInfo.hwnd = g_hWnd;
    VkResult result = vkCreateWin32SurfaceKHR(g_hVulkanInstance, &surfaceInfo, NULL, &g_hSurface);
    assert(result == VK_SUCCESS);

    if(g_EnableValidationLayer == true)
        RegisterDebugCallbacks();

    // Find physical device

    uint32_t deviceCount = 0;
    ERR_GUARD_VULKAN( vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, nullptr) );
    assert(deviceCount > 0);

    std::vector<VkPhysicalDevice> physicalDevices(deviceCount);
    ERR_GUARD_VULKAN( vkEnumeratePhysicalDevices(g_hVulkanInstance, &deviceCount, physicalDevices.data()) );

    g_hPhysicalDevice = physicalDevices[0];

    // Query for features

    VkPhysicalDeviceProperties physicalDeviceProperties = {};
    vkGetPhysicalDeviceProperties(g_hPhysicalDevice, &physicalDeviceProperties);

    //VkPhysicalDeviceFeatures physicalDeviceFreatures = {};
    //vkGetPhysicalDeviceFeatures(g_PhysicalDevice, &physicalDeviceFreatures);

    // Find queue family index

    uint32_t queueFamilyCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, nullptr);
    assert(queueFamilyCount > 0);
    std::vector<VkQueueFamilyProperties> queueFamilies(queueFamilyCount);
    vkGetPhysicalDeviceQueueFamilyProperties(g_hPhysicalDevice, &queueFamilyCount, queueFamilies.data());
    for(uint32_t i = 0;
        (i < queueFamilyCount) &&
            (g_GraphicsQueueFamilyIndex == UINT_MAX || g_PresentQueueFamilyIndex == UINT_MAX);
        ++i)
    {
        if(queueFamilies[i].queueCount > 0)
        {
            if((g_GraphicsQueueFamilyIndex == UINT_MAX) &&
                ((queueFamilies[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0))
            {
                g_GraphicsQueueFamilyIndex = i;
            }

            VkBool32 surfaceSupported = 0;
            VkResult res = vkGetPhysicalDeviceSurfaceSupportKHR(g_hPhysicalDevice, i, g_hSurface, &surfaceSupported);
            if((res >= 0) && (surfaceSupported == VK_TRUE))
            {
                g_PresentQueueFamilyIndex = i;
            }
        }
    }
    assert(g_GraphicsQueueFamilyIndex != UINT_MAX);

    // Create logical device

    const float queuePriority = 1.f;

    VkDeviceQueueCreateInfo deviceQueueCreateInfo[2] = { VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
    deviceQueueCreateInfo[0].queueFamilyIndex = g_GraphicsQueueFamilyIndex;
    deviceQueueCreateInfo[0].queueCount = 1;
    deviceQueueCreateInfo[0].pQueuePriorities = &queuePriority;
    deviceQueueCreateInfo[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    deviceQueueCreateInfo[1].queueFamilyIndex = g_PresentQueueFamilyIndex;
    deviceQueueCreateInfo[1].queueCount = 1;
    deviceQueueCreateInfo[1].pQueuePriorities = &queuePriority;

    VkPhysicalDeviceFeatures deviceFeatures = {};
    deviceFeatures.fillModeNonSolid = VK_TRUE;
    deviceFeatures.samplerAnisotropy = VK_TRUE;

    std::vector<const char*> enabledDeviceExtensions;
    enabledDeviceExtensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);

    VkDeviceCreateInfo deviceCreateInfo = { VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
    deviceCreateInfo.enabledLayerCount = 0;
    deviceCreateInfo.ppEnabledLayerNames = nullptr;
    deviceCreateInfo.enabledExtensionCount = (uint32_t)enabledDeviceExtensions.size();
    deviceCreateInfo.ppEnabledExtensionNames = enabledDeviceExtensions.data();
    deviceCreateInfo.queueCreateInfoCount = g_PresentQueueFamilyIndex != g_GraphicsQueueFamilyIndex ? 2 : 1;
    deviceCreateInfo.pQueueCreateInfos = deviceQueueCreateInfo;
    deviceCreateInfo.pEnabledFeatures = &deviceFeatures;

    ERR_GUARD_VULKAN( vkCreateDevice(g_hPhysicalDevice, &deviceCreateInfo, nullptr, &g_hDevice) );

    // Create memory allocator

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = g_hPhysicalDevice;
    allocatorInfo.device = g_hDevice;
    ERR_GUARD_VULKAN( vmaCreateAllocator(&allocatorInfo, &g_hAllocator) );

    // Retrieve queues (they don't need to be destroyed)
1273
1274 vkGetDeviceQueue(g_hDevice, g_GraphicsQueueFamilyIndex, 0, &g_hGraphicsQueue);
1275 vkGetDeviceQueue(g_hDevice, g_PresentQueueFamilyIndex, 0, &g_hPresentQueue);
1276 assert(g_hGraphicsQueue);
1277 assert(g_hPresentQueue);
1278
1279 // Create command pool
1280
1281 VkCommandPoolCreateInfo commandPoolInfo = { VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO };
1282 commandPoolInfo.queueFamilyIndex = g_GraphicsQueueFamilyIndex;
1283 commandPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
1284 ERR_GUARD_VULKAN( vkCreateCommandPool(g_hDevice, &commandPoolInfo, nullptr, &g_hCommandPool) );
1285
1286 VkCommandBufferAllocateInfo commandBufferInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO };
1287 commandBufferInfo.commandPool = g_hCommandPool;
1288 commandBufferInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
1289 commandBufferInfo.commandBufferCount = COMMAND_BUFFER_COUNT;
1290 ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, g_MainCommandBuffers) );
1291
1292 VkFenceCreateInfo fenceInfo = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
1293 fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
1294 for(size_t i = 0; i < COMMAND_BUFFER_COUNT; ++i)
1295 {
1296 ERR_GUARD_VULKAN( vkCreateFence(g_hDevice, &fenceInfo, nullptr, &g_MainCommandBufferExecutedFances[i]) );
1297 }
1298
1299 commandBufferInfo.commandBufferCount = 1;
1300 ERR_GUARD_VULKAN( vkAllocateCommandBuffers(g_hDevice, &commandBufferInfo, &g_hTemporaryCommandBuffer) );
1301
1302 // Create texture sampler
1303
1304 VkSamplerCreateInfo samplerInfo = { VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO };
1305 samplerInfo.magFilter = VK_FILTER_LINEAR;
1306 samplerInfo.minFilter = VK_FILTER_LINEAR;
1307 samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1308 samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1309 samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
1310 samplerInfo.anisotropyEnable = VK_TRUE;
1311 samplerInfo.maxAnisotropy = 16;
1312 samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
1313 samplerInfo.unnormalizedCoordinates = VK_FALSE;
1314 samplerInfo.compareEnable = VK_FALSE;
1315 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
1316 samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
1317 samplerInfo.mipLodBias = 0.f;
1318 samplerInfo.minLod = 0.f;
1319 samplerInfo.maxLod = FLT_MAX;
1320 ERR_GUARD_VULKAN( vkCreateSampler(g_hDevice, &samplerInfo, nullptr, &g_hSampler) );
1321
1322 CreateTexture(128, 128);
1323 CreateMesh();
1324
1325 VkDescriptorSetLayoutBinding samplerLayoutBinding = {};
1326 samplerLayoutBinding.binding = 1;
1327 samplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1328 samplerLayoutBinding.descriptorCount = 1;
1329 samplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
1330
1331 VkDescriptorSetLayoutCreateInfo descriptorSetLayoutInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO };
1332 descriptorSetLayoutInfo.bindingCount = 1;
1333 descriptorSetLayoutInfo.pBindings = &samplerLayoutBinding;
1334 ERR_GUARD_VULKAN( vkCreateDescriptorSetLayout(g_hDevice, &descriptorSetLayoutInfo, nullptr, &g_hDescriptorSetLayout) );
1335
1336 // Create descriptor pool
1337
1338 VkDescriptorPoolSize descriptorPoolSizes[2];
1339 ZeroMemory(descriptorPoolSizes, sizeof(descriptorPoolSizes));
1340 descriptorPoolSizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
1341 descriptorPoolSizes[0].descriptorCount = 1;
1342 descriptorPoolSizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1343 descriptorPoolSizes[1].descriptorCount = 1;
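// The pool also reserves one uniform buffer descriptor, but only the combined image
// sampler descriptor is actually allocated and written below.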
1344
1345 VkDescriptorPoolCreateInfo descriptorPoolInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO };
1346 descriptorPoolInfo.poolSizeCount = (uint32_t)_countof(descriptorPoolSizes);
1347 descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
1348 descriptorPoolInfo.maxSets = 1;
1349 ERR_GUARD_VULKAN( vkCreateDescriptorPool(g_hDevice, &descriptorPoolInfo, nullptr, &g_hDescriptorPool) );
1350
1351 // Allocate descriptor set and write the texture into it
1352
1353 VkDescriptorSetLayout descriptorSetLayouts[] = { g_hDescriptorSetLayout };
1354 VkDescriptorSetAllocateInfo descriptorSetInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
1355 descriptorSetInfo.descriptorPool = g_hDescriptorPool;
1356 descriptorSetInfo.descriptorSetCount = 1;
1357 descriptorSetInfo.pSetLayouts = descriptorSetLayouts;
1358 ERR_GUARD_VULKAN( vkAllocateDescriptorSets(g_hDevice, &descriptorSetInfo, &g_hDescriptorSet) );
1359
1360 VkDescriptorImageInfo descriptorImageInfo = {};
1361 descriptorImageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1362 descriptorImageInfo.imageView = g_hTextureImageView;
1363 descriptorImageInfo.sampler = g_hSampler;
1364
1365 VkWriteDescriptorSet writeDescriptorSet = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
1366 writeDescriptorSet.dstSet = g_hDescriptorSet;
1367 writeDescriptorSet.dstBinding = 1;
1368 writeDescriptorSet.dstArrayElement = 0;
1369 writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1370 writeDescriptorSet.descriptorCount = 1;
1371 writeDescriptorSet.pImageInfo = &descriptorImageInfo;
1372
1373 vkUpdateDescriptorSets(g_hDevice, 1, &writeDescriptorSet, 0, nullptr);
1374
1375 CreateSwapchain();
1376}
1377
1378static void FinalizeApplication()
1379{
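// Make sure the GPU has finished all submitted work before any resources are destroyed.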
1380 vkDeviceWaitIdle(g_hDevice);
1381
1382 DestroySwapchain(true);
1383
1384 if(g_hDescriptorPool != VK_NULL_HANDLE)
1385 {
1386 vkDestroyDescriptorPool(g_hDevice, g_hDescriptorPool, nullptr);
1387 g_hDescriptorPool = VK_NULL_HANDLE;
1388 }
1389
1390 if(g_hDescriptorSetLayout != VK_NULL_HANDLE)
1391 {
1392 vkDestroyDescriptorSetLayout(g_hDevice, g_hDescriptorSetLayout, nullptr);
1393 g_hDescriptorSetLayout = VK_NULL_HANDLE;
1394 }
1395
1396 if(g_hTextureImageView != VK_NULL_HANDLE)
1397 {
1398 vkDestroyImageView(g_hDevice, g_hTextureImageView, nullptr);
1399 g_hTextureImageView = VK_NULL_HANDLE;
1400 }
1401 if(g_hTextureImage != VK_NULL_HANDLE)
1402 {
1403 vmaDestroyImage(g_hAllocator, g_hTextureImage);
1404 g_hTextureImage = VK_NULL_HANDLE;
1405 }
1406
1407 if(g_hIndexBuffer != VK_NULL_HANDLE)
1408 {
1409 vmaDestroyBuffer(g_hAllocator, g_hIndexBuffer);
1410 g_hIndexBuffer = VK_NULL_HANDLE;
1411 }
1412 if(g_hVertexBuffer != VK_NULL_HANDLE)
1413 {
1414 vmaDestroyBuffer(g_hAllocator, g_hVertexBuffer);
1415 g_hVertexBuffer = VK_NULL_HANDLE;
1416 }
1417
1418 if(g_hSampler != VK_NULL_HANDLE)
1419 {
1420 vkDestroySampler(g_hDevice, g_hSampler, nullptr);
1421 g_hSampler = VK_NULL_HANDLE;
1422 }
1423
1424 for(size_t i = COMMAND_BUFFER_COUNT; i--; )
1425 {
1426 if(g_MainCommandBufferExecutedFances[i] != VK_NULL_HANDLE)
1427 {
1428 vkDestroyFence(g_hDevice, g_MainCommandBufferExecutedFances[i], nullptr);
1429 g_MainCommandBufferExecutedFances[i] = VK_NULL_HANDLE;
1430 }
1431 }
1432 if(g_MainCommandBuffers[0] != VK_NULL_HANDLE)
1433 {
1434 vkFreeCommandBuffers(g_hDevice, g_hCommandPool, COMMAND_BUFFER_COUNT, g_MainCommandBuffers);
1435 ZeroMemory(g_MainCommandBuffers, sizeof(g_MainCommandBuffers));
1436 }
1437 if(g_hTemporaryCommandBuffer != VK_NULL_HANDLE)
1438 {
1439 vkFreeCommandBuffers(g_hDevice, g_hCommandPool, 1, &g_hTemporaryCommandBuffer);
1440 g_hTemporaryCommandBuffer = VK_NULL_HANDLE;
1441 }
1442
1443 if(g_hCommandPool != VK_NULL_HANDLE)
1444 {
1445 vkDestroyCommandPool(g_hDevice, g_hCommandPool, nullptr);
1446 g_hCommandPool = VK_NULL_HANDLE;
1447 }
1448
1449 if(g_hAllocator != VK_NULL_HANDLE)
1450 {
1451 vmaDestroyAllocator(g_hAllocator);
1452 g_hAllocator = nullptr;
1453 }
1454
1455 if(g_hDevice != VK_NULL_HANDLE)
1456 {
1457 vkDestroyDevice(g_hDevice, nullptr);
1458 g_hDevice = nullptr;
1459 }
1460
1461 if(g_pvkDestroyDebugReportCallbackEXT && g_hCallback != VK_NULL_HANDLE)
1462 {
1463 g_pvkDestroyDebugReportCallbackEXT(g_hVulkanInstance, g_hCallback, nullptr);
1464 g_hCallback = VK_NULL_HANDLE;
1465 }
1466
1467 if(g_hSurface != VK_NULL_HANDLE)
1468 {
1469 vkDestroySurfaceKHR(g_hVulkanInstance, g_hSurface, nullptr);
1470 g_hSurface = VK_NULL_HANDLE;
1471 }
1472
1473 if(g_hVulkanInstance != VK_NULL_HANDLE)
1474 {
1475 vkDestroyInstance(g_hVulkanInstance, nullptr);
1476 g_hVulkanInstance = VK_NULL_HANDLE;
1477 }
1478}
1479
1480static void PrintAllocatorStats()
1481{
1482#if VMA_STATS_STRING_ENABLED
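// vmaBuildStatsString() returns a JSON-formatted report of the allocator's memory blocks
// and allocations; the last argument requests the detailed map.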
1483 char* statsString = nullptr;
1484 vmaBuildStatsString(g_hAllocator, &statsString, true);
1485 printf("%s\n", statsString);
1486 vmaFreeStatsString(g_hAllocator, statsString);
1487#endif
1488}
1489
1490static void RecreateSwapChain()
1491{
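// Wait until no submitted work still references the old swapchain images or framebuffers,
// then destroy and recreate everything that depends on the swapchain.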
1492 vkDeviceWaitIdle(g_hDevice);
1493 DestroySwapchain(false);
1494 CreateSwapchain();
1495}
1496
1497static void DrawFrame()
1498{
1499 // Begin main command buffer
1500 size_t cmdBufIndex = (g_NextCommandBufferIndex++) % COMMAND_BUFFER_COUNT;
1501 VkCommandBuffer hCommandBuffer = g_MainCommandBuffers[cmdBufIndex];
1502 VkFence hCommandBufferExecutedFence = g_MainCommandBufferExecutedFances[cmdBufIndex];
1503
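// Wait until the GPU has finished the previous submission that used this command buffer,
// then un-signal the fence so it can track this frame's submission.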
1504 ERR_GUARD_VULKAN( vkWaitForFences(g_hDevice, 1, &hCommandBufferExecutedFence, VK_TRUE, UINT64_MAX) );
1505 ERR_GUARD_VULKAN( vkResetFences(g_hDevice, 1, &hCommandBufferExecutedFence) );
1506
1507 VkCommandBufferBeginInfo commandBufferBeginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
1508 commandBufferBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
1509 ERR_GUARD_VULKAN( vkBeginCommandBuffer(hCommandBuffer, &commandBufferBeginInfo) );
1510
1511 // Acquire swapchain image
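// g_hImageAvailableSemaphore is signaled once the acquired image is ready to be rendered to.
// VK_ERROR_OUT_OF_DATE_KHR means the surface no longer matches the swapchain (e.g. after a
// window resize), so the swapchain is recreated and this frame is skipped.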
1512 uint32_t imageIndex = 0;
1513 VkResult res = vkAcquireNextImageKHR(g_hDevice, g_hSwapchain, UINT64_MAX, g_hImageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex);
1514 if(res == VK_ERROR_OUT_OF_DATE_KHR)
1515 {
1516 RecreateSwapChain();
1517 return;
1518 }
1519 else if(res < 0)
1520 {
1521 ERR_GUARD_VULKAN(res);
1522 }
1523
1524 // Record geometry pass
1525
1526 VkClearValue clearValues[2];
1527 ZeroMemory(clearValues, sizeof(clearValues));
1528 clearValues[0].color.float32[0] = 0.25f;
1529 clearValues[0].color.float32[1] = 0.25f;
1530 clearValues[0].color.float32[2] = 0.5f;
1531 clearValues[0].color.float32[3] = 1.0f;
1532 clearValues[1].depthStencil.depth = 1.0f;
1533
1534 VkRenderPassBeginInfo renderPassBeginInfo = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
1535 renderPassBeginInfo.renderPass = g_hRenderPass;
1536 renderPassBeginInfo.framebuffer = g_Framebuffers[imageIndex];
1537 renderPassBeginInfo.renderArea.offset.x = 0;
1538 renderPassBeginInfo.renderArea.offset.y = 0;
1539 renderPassBeginInfo.renderArea.extent = g_Extent;
1540 renderPassBeginInfo.clearValueCount = (uint32_t)_countof(clearValues);
1541 renderPassBeginInfo.pClearValues = clearValues;
1542 vkCmdBeginRenderPass(hCommandBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
1543
1544 vkCmdBindPipeline(
1545 hCommandBuffer,
1546 VK_PIPELINE_BIND_POINT_GRAPHICS,
1547 g_hPipeline);
1548
1549 mathfu::mat4 view = mathfu::mat4::LookAt(
1550 mathfu::kZeros3f,
1551 mathfu::vec3(0.f, -2.f, 4.f),
1552 mathfu::kAxisY3f);
1553 mathfu::mat4 proj = mathfu::mat4::Perspective(
1554 1.0471975511966f, // 60 degrees
1555 (float)g_Extent.width / (float)g_Extent.height,
1556 0.1f,
1557 1000.f,
1558 -1.f);
1559 //proj[1][1] *= -1.f;
1560 mathfu::mat4 viewProj = proj * view;
1561
1562 vkCmdBindDescriptorSets(
1563 hCommandBuffer,
1564 VK_PIPELINE_BIND_POINT_GRAPHICS,
1565 g_hPipelineLayout,
1566 0,
1567 1,
1568 &g_hDescriptorSet,
1569 0,
1570 nullptr);
1571
1572 float rotationAngle = (float)GetTickCount() * 0.001f * (float)M_PI * 0.2f;
1573 mathfu::mat3 model_3 = mathfu::mat3::RotationY(rotationAngle);
1574 mathfu::mat4 model_4 = mathfu::mat4(
1575 model_3(0, 0), model_3(0, 1), model_3(0, 2), 0.f,
1576 model_3(1, 0), model_3(1, 1), model_3(1, 2), 0.f,
1577 model_3(2, 0), model_3(2, 1), model_3(2, 2), 0.f,
1578 0.f, 0.f, 0.f, 1.f);
1579 mathfu::mat4 modelViewProj = viewProj * model_4;
1580
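// The model-view-projection matrix is small enough to fit in push constants, so no
// uniform buffer needs to be updated or bound for this draw.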
1581 UniformBufferObject ubo = {};
1582 modelViewProj.Pack(ubo.ModelViewProj);
1583 vkCmdPushConstants(hCommandBuffer, g_hPipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(UniformBufferObject), &ubo);
1584
1585 VkBuffer vertexBuffers[] = { g_hVertexBuffer };
1586 VkDeviceSize offsets[] = { 0 };
1587 vkCmdBindVertexBuffers(hCommandBuffer, 0, 1, vertexBuffers, offsets);
1588
1589 vkCmdBindIndexBuffer(hCommandBuffer, g_hIndexBuffer, 0, VK_INDEX_TYPE_UINT16);
1590
1591 vkCmdDrawIndexed(hCommandBuffer, g_IndexCount, 1, 0, 0, 0);
1592
1593 vkCmdEndRenderPass(hCommandBuffer);
1594
1595 ERR_GUARD_VULKAN( vkEndCommandBuffer(hCommandBuffer) );
1596
1597 // Submit command buffer
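// The submission waits on the image-available semaphore at the color-attachment-output stage,
// signals g_hRenderFinishedSemaphore for the present below, and signals the per-command-buffer
// fence so the CPU knows when this command buffer can be re-recorded.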
1598
1599 VkSemaphore submitWaitSemaphores[] = { g_hImageAvailableSemaphore };
1600 VkPipelineStageFlags submitWaitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
1601 VkSemaphore submitSignalSemaphores[] = { g_hRenderFinishedSemaphore };
1602 VkSubmitInfo submitInfo = { VK_STRUCTURE_TYPE_SUBMIT_INFO };
1603 submitInfo.waitSemaphoreCount = 1;
1604 submitInfo.pWaitSemaphores = submitWaitSemaphores;
1605 submitInfo.pWaitDstStageMask = submitWaitStages;
1606 submitInfo.commandBufferCount = 1;
1607 submitInfo.pCommandBuffers = &hCommandBuffer;
1608 submitInfo.signalSemaphoreCount = _countof(submitSignalSemaphores);
1609 submitInfo.pSignalSemaphores = submitSignalSemaphores;
1610 ERR_GUARD_VULKAN( vkQueueSubmit(g_hGraphicsQueue, 1, &submitInfo, hCommandBufferExecutedFence) );
1611
1612 VkSemaphore presentWaitSemaphores[] = { g_hRenderFinishedSemaphore };
1613
1614 VkSwapchainKHR swapchains[] = { g_hSwapchain };
1615 VkPresentInfoKHR presentInfo = { VK_STRUCTURE_TYPE_PRESENT_INFO_KHR };
1616 presentInfo.waitSemaphoreCount = _countof(presentWaitSemaphores);
1617 presentInfo.pWaitSemaphores = presentWaitSemaphores;
1618 presentInfo.swapchainCount = 1;
1619 presentInfo.pSwapchains = swapchains;
1620 presentInfo.pImageIndices = &imageIndex;
1621 presentInfo.pResults = nullptr;
1622 res = vkQueuePresentKHR(g_hPresentQueue, &presentInfo);
1623 if(res == VK_ERROR_OUT_OF_DATE_KHR)
1624 {
1625 RecreateSwapChain();
1626 }
1627 else
1628 ERR_GUARD_VULKAN(res);
1629}
1630
1631static void HandlePossibleSizeChange()
1632{
1633 RECT clientRect;
1634 GetClientRect(g_hWnd, &clientRect);
1635 LONG newSizeX = clientRect.right - clientRect.left;
1636 LONG newSizeY = clientRect.bottom - clientRect.top;
1637 if((newSizeX > 0) &&
1638 (newSizeY > 0) &&
1639 ((newSizeX != g_SizeX) || (newSizeY != g_SizeY)))
1640 {
1641 g_SizeX = newSizeX;
1642 g_SizeY = newSizeY;
1643
1644 RecreateSwapChain();
1645 }
1646}
1647
1648static LRESULT WINAPI WndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
1649{
1650 switch(msg)
1651 {
1652 case WM_CREATE:
1653 // This is intentionally assigned here because we are now inside CreateWindow, before it returns.
1654 g_hWnd = hWnd;
1655 InitializeApplication();
1656 PrintAllocatorStats();
1657 return 0;
1658
1659 case WM_DESTROY:
1660 FinalizeApplication();
1661 PostQuitMessage(0);
1662 return 0;
1663
1664 // This prevents the app from freezing when left Alt is pressed
1665 // (which would normally enter the modal menu loop).
1666 case WM_SYSKEYDOWN:
1667 case WM_SYSKEYUP:
1668 return 0;
1669
1670 case WM_SIZE:
1671 if((wParam == SIZE_MAXIMIZED) || (wParam == SIZE_RESTORED))
1672 HandlePossibleSizeChange();
1673 return 0;
1674
1675 case WM_EXITSIZEMOVE:
1676 HandlePossibleSizeChange();
1677 return 0;
1678
1679 case WM_KEYDOWN:
1680 if(wParam == VK_ESCAPE)
1681 PostMessage(hWnd, WM_CLOSE, 0, 0);
1682 return 0;
1683
1684 default:
1685 break;
1686 }
1687
1688 return DefWindowProc(hWnd, msg, wParam, lParam);
1689}
1690
1691int main()
1692{
1693 g_hAppInstance = (HINSTANCE)GetModuleHandle(NULL);
1694
1695 WNDCLASSEX wndClassDesc = { sizeof(WNDCLASSEX) };
1696 wndClassDesc.style = CS_VREDRAW | CS_HREDRAW | CS_DBLCLKS;
1697 wndClassDesc.hbrBackground = NULL;
1698 wndClassDesc.hCursor = LoadCursor(NULL, IDC_CROSS);
1699 wndClassDesc.hIcon = LoadIcon(NULL, IDI_APPLICATION);
1700 wndClassDesc.hInstance = g_hAppInstance;
1701 wndClassDesc.lpfnWndProc = WndProc;
1702 wndClassDesc.lpszClassName = WINDOW_CLASS_NAME;
1703
1704 const ATOM hWndClass = RegisterClassEx(&wndClassDesc);
1705 assert(hWndClass);
1706
1707 const DWORD style = WS_VISIBLE | WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_MINIMIZEBOX | WS_MAXIMIZEBOX | WS_THICKFRAME;
1708 const DWORD exStyle = 0;
1709
1710 RECT rect = { 0, 0, g_SizeX, g_SizeY };
1711 AdjustWindowRectEx(&rect, style, FALSE, exStyle);
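// rect now holds the outer window size (caption and borders included) that yields a
// g_SizeX x g_SizeY client area; it is used as the initial window size below.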
1712
1713 CreateWindowEx(
1714 exStyle, WINDOW_CLASS_NAME, APP_TITLE_W, style,
1715 CW_USEDEFAULT, CW_USEDEFAULT, rect.right - rect.left, rect.bottom - rect.top,
1716 NULL, NULL, g_hAppInstance, NULL);
1717
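// Main loop: PeekMessage() is non-blocking, so DrawFrame() keeps running whenever the
// message queue is empty.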
1718 MSG msg;
1719 for(;;)
1720 {
1721 if(PeekMessage(&msg, NULL, 0, 0, PM_REMOVE))
1722 {
1723 if(msg.message == WM_QUIT)
1724 break;
1725 TranslateMessage(&msg);
1726 DispatchMessage(&msg);
1727 }
1728 if(g_hDevice != VK_NULL_HANDLE)
1729 DrawFrame();
1730 }
1731
1732 return 0;
1733}