/*
 * Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
 * Author: Tony Barbour <tony@LunarG.com>
 */

#include "vktestbinding.h"

#include <string.h>  // memset(), memcmp()
#include <algorithm>
#include <cassert>
#include <iostream>
#include <vector>

#include "test_common.h"
#include "vk_typemap_helper.h"

namespace {

#define NON_DISPATCHABLE_HANDLE_INIT(create_func, dev, ...)                                 \
    do {                                                                                    \
        handle_type handle;                                                                 \
        auto result = create_func(dev.handle(), __VA_ARGS__, NULL, &handle);                \
        if (EXPECT((result == VK_SUCCESS) || (result == VK_ERROR_VALIDATION_FAILED_EXT))) { \
            if (result == VK_SUCCESS) {                                                     \
                NonDispHandle::init(dev.handle(), handle);                                  \
            }                                                                               \
        }                                                                                   \
    } while (0)

#define NON_DISPATCHABLE_HANDLE_DTOR(cls, destroy_func) \
    cls::~cls() NOEXCEPT {                              \
        if (initialized()) {                            \
            destroy_func(device(), handle(), NULL);     \
            handle_ = VK_NULL_HANDLE;                   \
        }                                               \
    }

#define STRINGIFY(x) #x
#define EXPECT(expr) ((expr) ? true : expect_failure(STRINGIFY(expr), __FILE__, __LINE__, __FUNCTION__))
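
// Note on the two macros above: a wrapper class below typically pairs them — a sketch with
// hypothetical names ("Widget", "vkCreateWidget"/"vk::DestroyWidget" stand in for a real type):
//
//     NON_DISPATCHABLE_HANDLE_DTOR(Widget, vk::DestroyWidget)
//
//     void Widget::init(const Device &dev, const VkWidgetCreateInfo &info) {
//         NON_DISPATCHABLE_HANDLE_INIT(vk::CreateWidget, dev, &info);
//     }
//
// The init macro accepts VK_ERROR_VALIDATION_FAILED_EXT as well as VK_SUCCESS, so negative tests
// that expect creation to be rejected by validation do not trip the EXPECT() failure path.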

vk_testing::ErrorCallback error_callback;

bool expect_failure(const char *expr, const char *file, unsigned int line, const char *function) {
    if (error_callback) {
        error_callback(expr, file, line, function);
    } else {
        std::cerr << file << ":" << line << ": " << function << ": Expectation `" << expr << "' failed.\n";
    }

    return false;
}

}  // namespace

namespace vk_testing {

void set_error_callback(ErrorCallback callback) { error_callback = callback; }

VkPhysicalDeviceProperties PhysicalDevice::properties() const {
    VkPhysicalDeviceProperties info;

    vk::GetPhysicalDeviceProperties(handle(), &info);

    return info;
}

std::vector<VkQueueFamilyProperties> PhysicalDevice::queue_properties() const {
    std::vector<VkQueueFamilyProperties> info;
    uint32_t count;

    // Call once with NULL data to receive count
    vk::GetPhysicalDeviceQueueFamilyProperties(handle(), &count, NULL);
    info.resize(count);
    vk::GetPhysicalDeviceQueueFamilyProperties(handle(), &count, info.data());

    return info;
}

VkPhysicalDeviceMemoryProperties PhysicalDevice::memory_properties() const {
    VkPhysicalDeviceMemoryProperties info;

    vk::GetPhysicalDeviceMemoryProperties(handle(), &info);

    return info;
}

VkPhysicalDeviceFeatures PhysicalDevice::features() const {
    VkPhysicalDeviceFeatures features;
    vk::GetPhysicalDeviceFeatures(handle(), &features);
    return features;
}
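
// Usage sketch (illustrative; assumes the PhysicalDevice(VkPhysicalDevice) constructor declared
// in vktestbinding.h): tests usually snapshot capabilities before building a Device, e.g.
//
//     vk_testing::PhysicalDevice gpu(physical_device_handle);  // hypothetical VkPhysicalDevice
//     VkPhysicalDeviceFeatures feats = gpu.features();
//     if (!feats.samplerAnisotropy) {
//         // skip the test, or create the device without that feature
//     }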

/*
 * Return list of Global layers available
 */
std::vector<VkLayerProperties> GetGlobalLayers() {
    VkResult err;
    uint32_t layer_count = 32;
    std::vector<VkLayerProperties> layers(layer_count);
    do {
        err = vk::EnumerateInstanceLayerProperties(&layer_count, layers.data());
        if (err || 0 == layer_count) return {};
        if (err == VK_INCOMPLETE) layer_count *= 2;  // wasn't enough space, increase it
        layers.resize(layer_count);

    } while (VK_INCOMPLETE == err);

    assert(!err);
    return layers;
}
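
// Usage sketch (illustrative): callers typically scan the returned vector for a layer by name,
// e.g.
//
//     const auto layers = GetGlobalLayers();
//     const bool has_validation =
//         std::any_of(layers.begin(), layers.end(), [](const VkLayerProperties &lp) {
//             return strcmp(lp.layerName, "VK_LAYER_KHRONOS_validation") == 0;
//         });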

/*
 * Return list of Global extensions provided by the ICD / Loader
 */
std::vector<VkExtensionProperties> GetGlobalExtensions() { return GetGlobalExtensions(nullptr); }

/*
 * Return list of Global extensions provided by the specified layer
 * If pLayerName is NULL, will return extensions implemented by the loader /
 * ICDs
 */
std::vector<VkExtensionProperties> GetGlobalExtensions(const char *pLayerName) {
    VkResult err;
    uint32_t extension_count = 32;
    std::vector<VkExtensionProperties> extensions(extension_count);
    do {
        err = vk::EnumerateInstanceExtensionProperties(pLayerName, &extension_count, extensions.data());
        if (err || 0 == extension_count) return {};
        if (err == VK_INCOMPLETE) extension_count *= 2;  // wasn't enough space, increase it
        extensions.resize(extension_count);
    } while (VK_INCOMPLETE == err);

    assert(!err);
    return extensions;
}

/*
 * Return list of PhysicalDevice extensions provided by the specified layer
 * If pLayerName is NULL, will return extensions for ICD / loader.
 */
std::vector<VkExtensionProperties> PhysicalDevice::extensions(const char *pLayerName) const {
    VkResult err;
    uint32_t extension_count = 256;
    std::vector<VkExtensionProperties> extensions(extension_count);
    do {
        err = vk::EnumerateDeviceExtensionProperties(handle(), pLayerName, &extension_count, extensions.data());
        if (err || 0 == extension_count) return {};
        if (err == VK_INCOMPLETE) extension_count *= 2;  // wasn't enough space, increase it
        extensions.resize(extension_count);
    } while (VK_INCOMPLETE == err);

    return extensions;
}

bool PhysicalDevice::set_memory_type(const uint32_t type_bits, VkMemoryAllocateInfo *info, const VkFlags properties,
                                     const VkFlags forbid) const {
    uint32_t type_mask = type_bits;
    // Search memtypes to find first index with those properties
    for (uint32_t i = 0; i < memory_properties_.memoryTypeCount; i++) {
        if ((type_mask & 1) == 1) {
            // Type is available, does it match user properties?
            if ((memory_properties_.memoryTypes[i].propertyFlags & properties) == properties &&
                (memory_properties_.memoryTypes[i].propertyFlags & forbid) == 0) {
                info->memoryTypeIndex = i;
                return true;
            }
        }
        type_mask >>= 1;
    }
    // No memory types matched, return failure
    return false;
}
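
// Usage sketch (illustrative; "gpu" and "buffer" are hypothetical locals): set_memory_type() is
// normally fed the memoryTypeBits from a resource's VkMemoryRequirements, e.g.
//
//     VkMemoryRequirements reqs = buffer.memory_requirements();
//     VkMemoryAllocateInfo alloc = LvlInitStruct<VkMemoryAllocateInfo>();
//     alloc.allocationSize = reqs.size;
//     if (!gpu.set_memory_type(reqs.memoryTypeBits, &alloc, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)) {
//         // no compatible memory type on this device; skip or fall back
//     }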

/*
 * Return list of PhysicalDevice layers
 */
std::vector<VkLayerProperties> PhysicalDevice::layers() const {
    VkResult err;
    uint32_t layer_count = 32;
    std::vector<VkLayerProperties> layers(layer_count);
    do {
        err = vk::EnumerateDeviceLayerProperties(handle(), &layer_count, layers.data());
        if (err || 0 == layer_count) return {};
        if (err == VK_INCOMPLETE) layer_count *= 2;  // wasn't enough space, increase it
        layers.resize(layer_count);
    } while (VK_INCOMPLETE == err);

    return layers;
}

QueueCreateInfoArray::QueueCreateInfoArray(const std::vector<VkQueueFamilyProperties> &queue_props)
    : queue_info_(), queue_priorities_() {
    queue_info_.reserve(queue_props.size());

    for (uint32_t i = 0; i < (uint32_t)queue_props.size(); ++i) {
        if (queue_props[i].queueCount > 0) {
            VkDeviceQueueCreateInfo qi = LvlInitStruct<VkDeviceQueueCreateInfo>();
            qi.queueFamilyIndex = i;
            qi.queueCount = queue_props[i].queueCount;
            queue_priorities_.emplace_back(qi.queueCount, 0.0f);
            qi.pQueuePriorities = queue_priorities_[i].data();
            queue_info_.push_back(qi);
        }
    }
}

Device::~Device() NOEXCEPT {
    if (!initialized()) return;

    vk::DestroyDevice(handle(), NULL);
}

void Device::init(std::vector<const char *> &extensions, VkPhysicalDeviceFeatures *features, void *create_device_pnext) {
    // request all queues
    const std::vector<VkQueueFamilyProperties> queue_props = phy_.queue_properties();
    QueueCreateInfoArray queue_info(phy_.queue_properties());
    for (uint32_t i = 0; i < (uint32_t)queue_props.size(); i++) {
        if (queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
            graphics_queue_node_index_ = i;
            break;
        }
    }
    // Only request creation with queue families that have at least one queue
    std::vector<VkDeviceQueueCreateInfo> create_queue_infos;
    auto qci = queue_info.data();
    for (uint32_t j = 0; j < queue_info.size(); ++j) {
        if (qci[j].queueCount) {
            create_queue_infos.push_back(qci[j]);
        }
    }

    enabled_extensions_ = extensions;

    VkDeviceCreateInfo dev_info = LvlInitStruct<VkDeviceCreateInfo>(create_device_pnext);
    dev_info.queueCreateInfoCount = create_queue_infos.size();
    dev_info.pQueueCreateInfos = create_queue_infos.data();
    dev_info.enabledLayerCount = 0;
    dev_info.ppEnabledLayerNames = NULL;
    dev_info.enabledExtensionCount = extensions.size();
    dev_info.ppEnabledExtensionNames = extensions.data();

    VkPhysicalDeviceFeatures all_features;
    // Let VkPhysicalDeviceFeatures2 take priority over VkPhysicalDeviceFeatures,
    // since it supports extensions

    if (!(LvlFindInChain<VkPhysicalDeviceFeatures2>(dev_info.pNext))) {
        if (features) {
            dev_info.pEnabledFeatures = features;
        } else {
            // request all supportable features enabled
            all_features = phy().features();
            dev_info.pEnabledFeatures = &all_features;
        }
    }

    init(dev_info);
}
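
// Usage sketch (illustrative; "gpu_handle" is a hypothetical VkPhysicalDevice, and the
// Device(VkPhysicalDevice) constructor is assumed from vktestbinding.h):
//
//     std::vector<const char *> device_extensions = {VK_KHR_SWAPCHAIN_EXTENSION_NAME};
//     vk_testing::Device device(gpu_handle);
//     device.init(device_extensions, nullptr, nullptr);  // all supported features, no extra pNext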
274
Karl Schultz6addd812016-02-02 17:17:23 -0700275void Device::init(const VkDeviceCreateInfo &info) {
Chia-I Wuf368b602015-07-03 10:41:20 +0800276 VkDevice dev;
277
Mark Lobodzinskic7a5fcf2019-09-27 14:09:58 -0600278 if (EXPECT(vk::CreateDevice(phy_.handle(), &info, NULL, &dev) == VK_SUCCESS)) Handle::init(dev);
Chia-I Wuf1e2e992014-12-27 14:12:52 +0800279
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600280 init_queues(info);
Chia-I Wuf1e2e992014-12-27 14:12:52 +0800281 init_formats();
282}
283
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600284void Device::init_queues(const VkDeviceCreateInfo &info) {
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700285 uint32_t queue_node_count;
Mark Lobodzinskic7a5fcf2019-09-27 14:09:58 -0600286 vk::GetPhysicalDeviceQueueFamilyProperties(phy_.handle(), &queue_node_count, NULL);
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700287 EXPECT(queue_node_count >= 1);
288
Petr Krausc3aee2e2019-09-06 00:26:06 +0200289 std::vector<VkQueueFamilyProperties> queue_props(queue_node_count);
Mark Lobodzinskic7a5fcf2019-09-27 14:09:58 -0600290 vk::GetPhysicalDeviceQueueFamilyProperties(phy_.handle(), &queue_node_count, queue_props.data());
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700291
John Zulauf5abdf122018-03-27 10:12:37 -0600292 queue_families_.resize(queue_node_count);
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600293 for (uint32_t i = 0; i < info.queueCreateInfoCount; i++) {
294 const auto &queue_create_info = info.pQueueCreateInfos[i];
295 auto queue_family_i = queue_create_info.queueFamilyIndex;
296 const auto &queue_family_prop = queue_props[queue_family_i];
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700297
Petr Krausc3aee2e2019-09-06 00:26:06 +0200298 QueueFamilyQueues &queue_storage = queue_families_[queue_family_i];
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600299 queue_storage.reserve(queue_create_info.queueCount);
300 for (uint32_t queue_i = 0; queue_i < queue_create_info.queueCount; ++queue_i) {
Karl Schultz6addd812016-02-02 17:17:23 -0700301 // TODO: Need to add support for separate MEMMGR and work queues,
302 // including synchronization
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -0600303 VkQueue queue = VK_NULL_HANDLE;
Mark Lobodzinskic7a5fcf2019-09-27 14:09:58 -0600304 vk::GetDeviceQueue(handle(), queue_family_i, queue_i, &queue);
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700305
John Zulauf5abdf122018-03-27 10:12:37 -0600306 // Store single copy of the queue object that will self destruct
Petr Krausc3aee2e2019-09-06 00:26:06 +0200307 queue_storage.emplace_back(new Queue(queue, queue_family_i));
John Zulauf5abdf122018-03-27 10:12:37 -0600308
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600309 if (queue_family_prop.queueFlags & VK_QUEUE_GRAPHICS_BIT) {
John Zulauf5abdf122018-03-27 10:12:37 -0600310 queues_[GRAPHICS].push_back(queue_storage.back().get());
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700311 }
312
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600313 if (queue_family_prop.queueFlags & VK_QUEUE_COMPUTE_BIT) {
John Zulauf5abdf122018-03-27 10:12:37 -0600314 queues_[COMPUTE].push_back(queue_storage.back().get());
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700315 }
316
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600317 if (queue_family_prop.queueFlags & VK_QUEUE_TRANSFER_BIT) {
John Zulauf5abdf122018-03-27 10:12:37 -0600318 queues_[DMA].push_back(queue_storage.back().get());
Courtney Goeltzenleuchter18248e62015-03-05 18:09:39 -0700319 }
Chia-I Wuf1e2e992014-12-27 14:12:52 +0800320 }
321 }
322
Jeremy Gebbenc7af14b2021-10-22 11:16:48 -0600323 EXPECT(!queues_[GRAPHICS].empty() || !queues_[COMPUTE].empty() || !queues_[DMA].empty());
Chia-I Wuf1e2e992014-12-27 14:12:52 +0800324}

const Device::QueueFamilyQueues &Device::queue_family_queues(uint32_t queue_family) const {
    assert(queue_family < queue_families_.size());
    return queue_families_[queue_family];
}

void Device::init_formats() {
    // For each 1.0 core format, undefined = first, 12x12_SRGB_BLOCK = last
    for (int f = VK_FORMAT_UNDEFINED; f <= VK_FORMAT_ASTC_12x12_SRGB_BLOCK; f++) {
        const VkFormat fmt = static_cast<VkFormat>(f);
        const VkFormatProperties props = format_properties(fmt);

        if (props.linearTilingFeatures) {
            const Format tmp = {fmt, VK_IMAGE_TILING_LINEAR, props.linearTilingFeatures};
            formats_.push_back(tmp);
        }

        if (props.optimalTilingFeatures) {
            const Format tmp = {fmt, VK_IMAGE_TILING_OPTIMAL, props.optimalTilingFeatures};
            formats_.push_back(tmp);
        }
    }

    EXPECT(!formats_.empty());
}

bool Device::IsEnabledExtension(const char *extension) {
    const auto is_x = [&extension](const char *enabled_extension) { return strcmp(extension, enabled_extension) == 0; };
    return std::any_of(enabled_extensions_.begin(), enabled_extensions_.end(), is_x);
}

VkFormatProperties Device::format_properties(VkFormat format) {
    VkFormatProperties data;
    vk::GetPhysicalDeviceFormatProperties(phy().handle(), format, &data);

    return data;
}

void Device::wait() { EXPECT(vk::DeviceWaitIdle(handle()) == VK_SUCCESS); }

VkResult Device::wait(const std::vector<const Fence *> &fences, bool wait_all, uint64_t timeout) {
    const std::vector<VkFence> fence_handles = MakeVkHandles<VkFence>(fences);
    VkResult err = vk::WaitForFences(handle(), fence_handles.size(), fence_handles.data(), wait_all, timeout);
    EXPECT(err == VK_SUCCESS || err == VK_TIMEOUT);

    return err;
}

void Device::update_descriptor_sets(const std::vector<VkWriteDescriptorSet> &writes,
                                    const std::vector<VkCopyDescriptorSet> &copies) {
    vk::UpdateDescriptorSets(handle(), writes.size(), writes.data(), copies.size(), copies.data());
}

VkResult Queue::submit(const std::vector<const CommandBuffer *> &cmds, const Fence &fence, bool expect_success) {
    const std::vector<VkCommandBuffer> cmd_handles = MakeVkHandles<VkCommandBuffer>(cmds);
    VkSubmitInfo submit_info = LvlInitStruct<VkSubmitInfo>();
    submit_info.waitSemaphoreCount = 0;
    submit_info.pWaitSemaphores = NULL;
    submit_info.pWaitDstStageMask = NULL;
    submit_info.commandBufferCount = (uint32_t)cmd_handles.size();
    submit_info.pCommandBuffers = cmd_handles.data();
    submit_info.signalSemaphoreCount = 0;
    submit_info.pSignalSemaphores = NULL;

    VkResult result = vk::QueueSubmit(handle(), 1, &submit_info, fence.handle());
    if (expect_success) EXPECT(result == VK_SUCCESS);
    return result;
}
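
// Usage sketch (illustrative; "queue" and "command_buffer" are hypothetical locals from a test,
// and the defaulted expect_success argument is assumed from the header declaration):
//
//     queue->submit(command_buffer);   // single-command-buffer overload below
//     queue->wait();                   // vk::QueueWaitIdle under the hood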

VkResult Queue::submit(const CommandBuffer &cmd, const Fence &fence, bool expect_success) {
    return submit(std::vector<const CommandBuffer *>(1, &cmd), fence, expect_success);
}

VkResult Queue::submit(const CommandBuffer &cmd, bool expect_success) {
    Fence fence;
    return submit(cmd, fence, expect_success);
}

VkResult Queue::wait() {
    VkResult result = vk::QueueWaitIdle(handle());
    EXPECT(result == VK_SUCCESS);
    return result;
}

DeviceMemory::~DeviceMemory() NOEXCEPT {
    if (initialized()) vk::FreeMemory(device(), handle(), NULL);
}

void DeviceMemory::init(const Device &dev, const VkMemoryAllocateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::AllocateMemory, dev, &info);
}

const void *DeviceMemory::map(VkFlags flags) const {
    void *data;
    if (!EXPECT(vk::MapMemory(device(), handle(), 0, VK_WHOLE_SIZE, flags, &data) == VK_SUCCESS)) data = NULL;

    return data;
}

void *DeviceMemory::map(VkFlags flags) {
    void *data;
    if (!EXPECT(vk::MapMemory(device(), handle(), 0, VK_WHOLE_SIZE, flags, &data) == VK_SUCCESS)) data = NULL;

    return data;
}

void DeviceMemory::unmap() const { vk::UnmapMemory(device(), handle()); }

VkMemoryAllocateInfo DeviceMemory::get_resource_alloc_info(const Device &dev, const VkMemoryRequirements &reqs,
                                                           VkMemoryPropertyFlags mem_props) {
    // Find appropriate memory type for given reqs
    VkPhysicalDeviceMemoryProperties dev_mem_props = dev.phy().memory_properties();
    uint32_t mem_type_index = 0;
    for (mem_type_index = 0; mem_type_index < dev_mem_props.memoryTypeCount; ++mem_type_index) {
        if (mem_props == (mem_props & dev_mem_props.memoryTypes[mem_type_index].propertyFlags)) break;
    }
    // If we exceeded types, then this device doesn't have the memory we need
    assert(mem_type_index < dev_mem_props.memoryTypeCount);
    VkMemoryAllocateInfo info = alloc_info(reqs.size, mem_type_index);
    EXPECT(dev.phy().set_memory_type(reqs.memoryTypeBits, &info, mem_props));
    return info;
}

NON_DISPATCHABLE_HANDLE_DTOR(Fence, vk::DestroyFence)

void Fence::init(const Device &dev, const VkFenceCreateInfo &info) { NON_DISPATCHABLE_HANDLE_INIT(vk::CreateFence, dev, &info); }

VkResult Fence::wait(uint64_t timeout) const {
    VkFence fence = handle();
    return vk::WaitForFences(device(), 1, &fence, VK_TRUE, timeout);
}
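
// Usage sketch (illustrative; "device", "queue" and "command_buffer" are hypothetical locals):
//
//     vk_testing::Fence fence;
//     VkFenceCreateInfo fence_ci = LvlInitStruct<VkFenceCreateInfo>();
//     fence.init(device, fence_ci);
//     queue->submit(command_buffer, fence);
//     EXPECT(fence.wait(1000000000) == VK_SUCCESS);  // timeout in nanoseconds (~1 second)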

NON_DISPATCHABLE_HANDLE_DTOR(Semaphore, vk::DestroySemaphore)

void Semaphore::init(const Device &dev, const VkSemaphoreCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateSemaphore, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(Event, vk::DestroyEvent)

void Event::init(const Device &dev, const VkEventCreateInfo &info) { NON_DISPATCHABLE_HANDLE_INIT(vk::CreateEvent, dev, &info); }

void Event::set() { EXPECT(vk::SetEvent(device(), handle()) == VK_SUCCESS); }

void Event::cmd_set(const CommandBuffer &cmd, VkPipelineStageFlags stage_mask) {
    vk::CmdSetEvent(cmd.handle(), handle(), stage_mask);
}

void Event::cmd_reset(const CommandBuffer &cmd, VkPipelineStageFlags stage_mask) {
    vk::CmdResetEvent(cmd.handle(), handle(), stage_mask);
}

void Event::cmd_wait(const CommandBuffer &cmd, VkPipelineStageFlags src_stage_mask, VkPipelineStageFlags dst_stage_mask,
                     const std::vector<VkMemoryBarrier> &memory_barriers, const std::vector<VkBufferMemoryBarrier> &buffer_barriers,
                     const std::vector<VkImageMemoryBarrier> &image_barriers) {
    VkEvent event_handle = handle();
    vk::CmdWaitEvents(cmd.handle(), 1, &event_handle, src_stage_mask, dst_stage_mask, static_cast<uint32_t>(memory_barriers.size()),
                      memory_barriers.data(), static_cast<uint32_t>(buffer_barriers.size()), buffer_barriers.data(),
                      static_cast<uint32_t>(image_barriers.size()), image_barriers.data());
}

void Event::reset() { EXPECT(vk::ResetEvent(device(), handle()) == VK_SUCCESS); }

NON_DISPATCHABLE_HANDLE_DTOR(QueryPool, vk::DestroyQueryPool)

void QueryPool::init(const Device &dev, const VkQueryPoolCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateQueryPool, dev, &info);
}

VkResult QueryPool::results(uint32_t first, uint32_t count, size_t size, void *data, size_t stride) {
    VkResult err = vk::GetQueryPoolResults(device(), handle(), first, count, size, data, stride, 0);
    EXPECT(err == VK_SUCCESS || err == VK_NOT_READY);

    return err;
}

NON_DISPATCHABLE_HANDLE_DTOR(Buffer, vk::DestroyBuffer)

void Buffer::init(const Device &dev, const VkBufferCreateInfo &info, VkMemoryPropertyFlags mem_props) {
    init_no_mem(dev, info);

    internal_mem_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements(), mem_props));
    bind_memory(internal_mem_, 0);
}

void Buffer::init_no_mem(const Device &dev, const VkBufferCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateBuffer, dev, &info);
    create_info_ = info;
}

VkMemoryRequirements Buffer::memory_requirements() const {
    VkMemoryRequirements reqs;

    vk::GetBufferMemoryRequirements(device(), handle(), &reqs);

    return reqs;
}

void Buffer::bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset) {
    EXPECT(vk::BindBufferMemory(device(), handle(), mem.handle(), mem_offset) == VK_SUCCESS);
}

void Buffer::bind_memory(const Device &dev, VkMemoryPropertyFlags mem_props, VkDeviceSize mem_offset) {
    if (!internal_mem_.initialized()) {
        internal_mem_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements(), mem_props));
    }
    bind_memory(internal_mem_, mem_offset);
}
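
// Usage sketch (illustrative): a host-visible staging buffer filled through the mapped pointer.
// The memory() accessor on Buffer is assumed from vktestbinding.h; if it is not available, keep
// a separate DeviceMemory and use the two-step init_no_mem()/bind_memory() path instead.
//
//     VkBufferCreateInfo ci = LvlInitStruct<VkBufferCreateInfo>();
//     ci.size = 4096;
//     ci.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
//     vk_testing::Buffer staging;
//     staging.init(device, ci, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
//     void *ptr = staging.memory().map(0);
//     memset(ptr, 0, static_cast<size_t>(ci.size));
//     staging.memory().unmap();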

NON_DISPATCHABLE_HANDLE_DTOR(BufferView, vk::DestroyBufferView)

void BufferView::init(const Device &dev, const VkBufferViewCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateBufferView, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(Image, vk::DestroyImage)

void Image::init(const Device &dev, const VkImageCreateInfo &info, VkMemoryPropertyFlags mem_props) {
    init_no_mem(dev, info);

    if (initialized()) {
        internal_mem_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements(), mem_props));
        bind_memory(internal_mem_, 0);
    }
}

void Image::init_no_mem(const Device &dev, const VkImageCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateImage, dev, &info);
    if (initialized()) {
        init_info(dev, info);
    }
}

void Image::init_info(const Device &dev, const VkImageCreateInfo &info) {
    create_info_ = info;

    for (std::vector<Device::Format>::const_iterator it = dev.formats().begin(); it != dev.formats().end(); it++) {
        if (memcmp(&it->format, &create_info_.format, sizeof(it->format)) == 0 && it->tiling == create_info_.tiling) {
            format_features_ = it->features;
            break;
        }
    }
}

VkMemoryRequirements Image::memory_requirements() const {
    VkMemoryRequirements reqs;

    vk::GetImageMemoryRequirements(device(), handle(), &reqs);

    return reqs;
}

void Image::bind_memory(const DeviceMemory &mem, VkDeviceSize mem_offset) {
    EXPECT(vk::BindImageMemory(device(), handle(), mem.handle(), mem_offset) == VK_SUCCESS);
}

VkSubresourceLayout Image::subresource_layout(const VkImageSubresource &subres) const {
    VkSubresourceLayout data;
    size_t size = sizeof(data);
    vk::GetImageSubresourceLayout(device(), handle(), &subres, &data);
    if (size != sizeof(data)) memset(&data, 0, sizeof(data));

    return data;
}

VkSubresourceLayout Image::subresource_layout(const VkImageSubresourceLayers &subrescopy) const {
    VkSubresourceLayout data;
    VkImageSubresource subres = subresource(subrescopy.aspectMask, subrescopy.mipLevel, subrescopy.baseArrayLayer);
    size_t size = sizeof(data);
    vk::GetImageSubresourceLayout(device(), handle(), &subres, &data);
    if (size != sizeof(data)) memset(&data, 0, sizeof(data));

    return data;
}

bool Image::transparent() const {
    return (create_info_.tiling == VK_IMAGE_TILING_LINEAR && create_info_.samples == VK_SAMPLE_COUNT_1_BIT &&
            !(create_info_.usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)));
}

NON_DISPATCHABLE_HANDLE_DTOR(ImageView, vk::DestroyImageView)

void ImageView::init(const Device &dev, const VkImageViewCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateImageView, dev, &info);
}

AccelerationStructure::~AccelerationStructure() {
    if (initialized()) {
        PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV =
            (PFN_vkDestroyAccelerationStructureNV)vk::GetDeviceProcAddr(device(), "vkDestroyAccelerationStructureNV");
        assert(vkDestroyAccelerationStructureNV != nullptr);

        vkDestroyAccelerationStructureNV(device(), handle(), nullptr);
    }
}

AccelerationStructureKHR::~AccelerationStructureKHR() {
    if (initialized()) {
        PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR =
            (PFN_vkDestroyAccelerationStructureKHR)vk::GetDeviceProcAddr(device(), "vkDestroyAccelerationStructureKHR");
        assert(vkDestroyAccelerationStructureKHR != nullptr);
        vkDestroyAccelerationStructureKHR(device(), handle(), nullptr);
    }
}

VkMemoryRequirements2 AccelerationStructure::memory_requirements() const {
    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
        (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vk::GetDeviceProcAddr(device(),
                                                                                  "vkGetAccelerationStructureMemoryRequirementsNV");
    assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);
    VkMemoryRequirements2 memoryRequirements = {};
    VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo =
        LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
    memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV;
    memoryRequirementsInfo.accelerationStructure = handle();
    vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
    return memoryRequirements;
}

VkMemoryRequirements2 AccelerationStructure::build_scratch_memory_requirements() const {
    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV =
        (PFN_vkGetAccelerationStructureMemoryRequirementsNV)vk::GetDeviceProcAddr(device(),
                                                                                  "vkGetAccelerationStructureMemoryRequirementsNV");
    assert(vkGetAccelerationStructureMemoryRequirementsNV != nullptr);

    VkAccelerationStructureMemoryRequirementsInfoNV memoryRequirementsInfo =
        LvlInitStruct<VkAccelerationStructureMemoryRequirementsInfoNV>();
    memoryRequirementsInfo.type = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV;
    memoryRequirementsInfo.accelerationStructure = handle();

    VkMemoryRequirements2 memoryRequirements = {};
    vkGetAccelerationStructureMemoryRequirementsNV(device(), &memoryRequirementsInfo, &memoryRequirements);
    return memoryRequirements;
}

void AccelerationStructure::init(const Device &dev, const VkAccelerationStructureCreateInfoNV &info, bool init_memory) {
    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV =
        (PFN_vkCreateAccelerationStructureNV)vk::GetDeviceProcAddr(dev.handle(), "vkCreateAccelerationStructureNV");
    assert(vkCreateAccelerationStructureNV != nullptr);

    NON_DISPATCHABLE_HANDLE_INIT(vkCreateAccelerationStructureNV, dev, &info);

    info_ = info.info;

    if (init_memory) {
        memory_.init(dev, DeviceMemory::get_resource_alloc_info(dev, memory_requirements().memoryRequirements,
                                                                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT));

        PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV =
            (PFN_vkBindAccelerationStructureMemoryNV)vk::GetDeviceProcAddr(dev.handle(), "vkBindAccelerationStructureMemoryNV");
        assert(vkBindAccelerationStructureMemoryNV != nullptr);

        VkBindAccelerationStructureMemoryInfoNV bind_info = LvlInitStruct<VkBindAccelerationStructureMemoryInfoNV>();
        bind_info.accelerationStructure = handle();
        bind_info.memory = memory_.handle();
        EXPECT(vkBindAccelerationStructureMemoryNV(dev.handle(), 1, &bind_info) == VK_SUCCESS);

        PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV =
            (PFN_vkGetAccelerationStructureHandleNV)vk::GetDeviceProcAddr(dev.handle(), "vkGetAccelerationStructureHandleNV");
        assert(vkGetAccelerationStructureHandleNV != nullptr);
        EXPECT(vkGetAccelerationStructureHandleNV(dev.handle(), handle(), sizeof(uint64_t), &opaque_handle_) == VK_SUCCESS);
    }
}

void AccelerationStructure::create_scratch_buffer(const Device &dev, Buffer *buffer, VkBufferCreateInfo *pCreateInfo) {
    VkMemoryRequirements scratch_buffer_memory_requirements = build_scratch_memory_requirements().memoryRequirements;
    VkBufferCreateInfo create_info = {};
    create_info.size = scratch_buffer_memory_requirements.size;
    if (pCreateInfo) {
        create_info.sType = pCreateInfo->sType;
        create_info.usage = pCreateInfo->usage;
    } else {
        create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        create_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
    }
    buffer->init(dev, create_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
}

void AccelerationStructureKHR::init(const Device &dev, const VkAccelerationStructureCreateInfoKHR &info, bool init_memory) {
    PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR =
        (PFN_vkCreateAccelerationStructureKHR)vk::GetDeviceProcAddr(dev.handle(), "vkCreateAccelerationStructureKHR");
    assert(vkCreateAccelerationStructureKHR != nullptr);
    NON_DISPATCHABLE_HANDLE_INIT(vkCreateAccelerationStructureKHR, dev, &info);
    info_ = info;
}

void AccelerationStructureKHR::create_scratch_buffer(const Device &dev, Buffer *buffer, VkBufferCreateInfo *pCreateInfo) {
    VkBufferCreateInfo create_info = {};
    create_info.size = 0;
    if (pCreateInfo) {
        create_info.sType = pCreateInfo->sType;
        create_info.usage = pCreateInfo->usage;
        create_info.size = pCreateInfo->size;
    } else {
        create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        create_info.usage = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV;
    }
    buffer->init(dev, create_info, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
}

NON_DISPATCHABLE_HANDLE_DTOR(ShaderModule, vk::DestroyShaderModule)

void ShaderModule::init(const Device &dev, const VkShaderModuleCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateShaderModule, dev, &info);
}

VkResult ShaderModule::init_try(const Device &dev, const VkShaderModuleCreateInfo &info) {
    VkShaderModule mod;

    VkResult err = vk::CreateShaderModule(dev.handle(), &info, NULL, &mod);
    if (err == VK_SUCCESS) NonDispHandle::init(dev.handle(), mod);

    return err;
}

NON_DISPATCHABLE_HANDLE_DTOR(Pipeline, vk::DestroyPipeline)

void Pipeline::init(const Device &dev, const VkGraphicsPipelineCreateInfo &info) {
    VkPipelineCache cache;
    VkPipelineCacheCreateInfo ci = LvlInitStruct<VkPipelineCacheCreateInfo>();
    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
    if (err == VK_SUCCESS) {
        NON_DISPATCHABLE_HANDLE_INIT(vk::CreateGraphicsPipelines, dev, cache, 1, &info);
        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
    }
}

VkResult Pipeline::init_try(const Device &dev, const VkGraphicsPipelineCreateInfo &info) {
    VkPipeline pipe;
    VkPipelineCache cache;
    VkPipelineCacheCreateInfo ci = LvlInitStruct<VkPipelineCacheCreateInfo>();
    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
    EXPECT(err == VK_SUCCESS);
    if (err == VK_SUCCESS) {
        err = vk::CreateGraphicsPipelines(dev.handle(), cache, 1, &info, NULL, &pipe);
        if (err == VK_SUCCESS) {
            NonDispHandle::init(dev.handle(), pipe);
        }
        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
    }

    return err;
}

void Pipeline::init(const Device &dev, const VkComputePipelineCreateInfo &info) {
    VkPipelineCache cache;
    VkPipelineCacheCreateInfo ci = LvlInitStruct<VkPipelineCacheCreateInfo>();
    VkResult err = vk::CreatePipelineCache(dev.handle(), &ci, NULL, &cache);
    if (err == VK_SUCCESS) {
        NON_DISPATCHABLE_HANDLE_INIT(vk::CreateComputePipelines, dev, cache, 1, &info);
        vk::DestroyPipelineCache(dev.handle(), cache, NULL);
    }
}

NON_DISPATCHABLE_HANDLE_DTOR(PipelineLayout, vk::DestroyPipelineLayout)

void PipelineLayout::init(const Device &dev, VkPipelineLayoutCreateInfo &info,
                          const std::vector<const DescriptorSetLayout *> &layouts) {
    const std::vector<VkDescriptorSetLayout> layout_handles = MakeVkHandles<VkDescriptorSetLayout>(layouts);
    info.setLayoutCount = layout_handles.size();
    info.pSetLayouts = layout_handles.data();

    NON_DISPATCHABLE_HANDLE_INIT(vk::CreatePipelineLayout, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(Sampler, vk::DestroySampler)

void Sampler::init(const Device &dev, const VkSamplerCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateSampler, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(DescriptorSetLayout, vk::DestroyDescriptorSetLayout)

void DescriptorSetLayout::init(const Device &dev, const VkDescriptorSetLayoutCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateDescriptorSetLayout, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(DescriptorPool, vk::DestroyDescriptorPool)

void DescriptorPool::init(const Device &dev, const VkDescriptorPoolCreateInfo &info) {
    setDynamicUsage(info.flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateDescriptorPool, dev, &info);
}

void DescriptorPool::reset() { EXPECT(vk::ResetDescriptorPool(device(), handle(), 0) == VK_SUCCESS); }

std::vector<DescriptorSet *> DescriptorPool::alloc_sets(const Device &dev,
                                                        const std::vector<const DescriptorSetLayout *> &layouts) {
    const std::vector<VkDescriptorSetLayout> layout_handles = MakeVkHandles<VkDescriptorSetLayout>(layouts);

    std::vector<VkDescriptorSet> set_handles;
    set_handles.resize(layout_handles.size());

    VkDescriptorSetAllocateInfo alloc_info = LvlInitStruct<VkDescriptorSetAllocateInfo>();
    alloc_info.descriptorSetCount = layout_handles.size();
    alloc_info.descriptorPool = handle();
    alloc_info.pSetLayouts = layout_handles.data();
    VkResult err = vk::AllocateDescriptorSets(device(), &alloc_info, set_handles.data());
    EXPECT(err == VK_SUCCESS);

    std::vector<DescriptorSet *> sets;
    for (std::vector<VkDescriptorSet>::const_iterator it = set_handles.begin(); it != set_handles.end(); it++) {
        // do descriptor sets need memories bound?
        DescriptorSet *descriptorSet = new DescriptorSet(dev, this, *it);
        sets.push_back(descriptorSet);
    }
    return sets;
}

std::vector<DescriptorSet *> DescriptorPool::alloc_sets(const Device &dev, const DescriptorSetLayout &layout, uint32_t count) {
    return alloc_sets(dev, std::vector<const DescriptorSetLayout *>(count, &layout));
}

DescriptorSet *DescriptorPool::alloc_sets(const Device &dev, const DescriptorSetLayout &layout) {
    std::vector<DescriptorSet *> set = alloc_sets(dev, layout, 1);
    return (set.empty()) ? NULL : set[0];
}
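
// Usage sketch (illustrative; "pool", "set_layout", "device", and "buffer_info" are hypothetical
// locals): allocate a set and feed the result to Device::update_descriptor_sets(), e.g.
//
//     vk_testing::DescriptorSet *set = pool.alloc_sets(device, set_layout);
//     VkWriteDescriptorSet write = LvlInitStruct<VkWriteDescriptorSet>();
//     write.dstSet = set->handle();
//     write.descriptorCount = 1;
//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
//     write.pBufferInfo = &buffer_info;  // a VkDescriptorBufferInfo describing the bound buffer
//     device.update_descriptor_sets({write}, {});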

DescriptorSet::~DescriptorSet() NOEXCEPT {
    if (initialized()) {
        // Only call vk::Free* on sets allocated from pool with usage *_DYNAMIC
        if (containing_pool_->getDynamicUsage()) {
            VkDescriptorSet sets[1] = {handle()};
            EXPECT(vk::FreeDescriptorSets(device(), containing_pool_->GetObj(), 1, sets) == VK_SUCCESS);
        }
    }
}

NON_DISPATCHABLE_HANDLE_DTOR(CommandPool, vk::DestroyCommandPool)

void CommandPool::init(const Device &dev, const VkCommandPoolCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateCommandPool, dev, &info);
}

CommandBuffer::~CommandBuffer() NOEXCEPT {
    if (initialized()) {
        VkCommandBuffer cmds[] = {handle()};
        vk::FreeCommandBuffers(dev_handle_, cmd_pool_, 1, cmds);
    }
}

void CommandBuffer::init(const Device &dev, const VkCommandBufferAllocateInfo &info) {
    VkCommandBuffer cmd;

    // Make sure commandPool is set
    assert(info.commandPool);

    if (EXPECT(vk::AllocateCommandBuffers(dev.handle(), &info, &cmd) == VK_SUCCESS)) {
        Handle::init(cmd);
        dev_handle_ = dev.handle();
        cmd_pool_ = info.commandPool;
    }
}

void CommandBuffer::begin(const VkCommandBufferBeginInfo *info) { EXPECT(vk::BeginCommandBuffer(handle(), info) == VK_SUCCESS); }

void CommandBuffer::begin() {
    VkCommandBufferBeginInfo info = LvlInitStruct<VkCommandBufferBeginInfo>();
    VkCommandBufferInheritanceInfo hinfo = LvlInitStruct<VkCommandBufferInheritanceInfo>();
    info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    info.pInheritanceInfo = &hinfo;
    hinfo.renderPass = VK_NULL_HANDLE;
    hinfo.subpass = 0;
    hinfo.framebuffer = VK_NULL_HANDLE;
    hinfo.occlusionQueryEnable = VK_FALSE;
    hinfo.queryFlags = 0;
    hinfo.pipelineStatistics = 0;

    begin(&info);
}
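
// Usage sketch (illustrative; "command_buffer" and "queue" are hypothetical locals): the
// begin()/end() pair brackets command recording in most tests, e.g.
//
//     command_buffer.begin();   // ONE_TIME_SUBMIT with an empty inheritance info, as above
//     // ... record vk::Cmd* calls against command_buffer.handle() ...
//     command_buffer.end();
//     queue->submit(command_buffer);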

void CommandBuffer::end() { EXPECT(vk::EndCommandBuffer(handle()) == VK_SUCCESS); }

void CommandBuffer::reset(VkCommandBufferResetFlags flags) { EXPECT(vk::ResetCommandBuffer(handle(), flags) == VK_SUCCESS); }

void RenderPass::init(const Device &dev, const VkRenderPassCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateRenderPass, dev, &info);
}

void RenderPass::init(const Device &dev, const VkRenderPassCreateInfo2 &info, bool khr) {
    if (!khr) {
        NON_DISPATCHABLE_HANDLE_INIT(vk::CreateRenderPass2, dev, &info);
    } else {
        auto vkCreateRenderPass2KHR =
            reinterpret_cast<PFN_vkCreateRenderPass2KHR>(vk::GetDeviceProcAddr(dev.handle(), "vkCreateRenderPass2KHR"));
        ASSERT_NE(vkCreateRenderPass2KHR, nullptr);
        NON_DISPATCHABLE_HANDLE_INIT(vkCreateRenderPass2KHR, dev, &info);
    }
}

NON_DISPATCHABLE_HANDLE_DTOR(RenderPass, vk::DestroyRenderPass)

void Framebuffer::init(const Device &dev, const VkFramebufferCreateInfo &info) {
    NON_DISPATCHABLE_HANDLE_INIT(vk::CreateFramebuffer, dev, &info);
}

NON_DISPATCHABLE_HANDLE_DTOR(Framebuffer, vk::DestroyFramebuffer)
}  // namespace vk_testing