/* Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (C) 2015-2022 Google Inc.
 * Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: John Zulauf <jzulauf@lunarg.com>
 * Author: Tobias Hector <tobias.hector@amd.com>
 * Author: Jeremy Gebben <jeremyg@lunarg.com>
 */
#include "pipeline_state.h"
#include "descriptor_sets.h"
#include "cmd_buffer_state.h"
#include "state_tracker.h"
#include "shader_module.h"

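// Small helpers that summarize a stage's descriptor uses; used when constructing PipelineStageState below.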
static bool HasWriteableDescriptor(const std::vector<PipelineStageState::DescriptorUse> &descriptor_uses) {
    return std::any_of(descriptor_uses.begin(), descriptor_uses.end(),
                       [](const PipelineStageState::DescriptorUse &use) { return use.second.is_writable; });
}

static bool HasAtomicDescriptor(const std::vector<PipelineStageState::DescriptorUse> &descriptor_uses) {
    return std::any_of(descriptor_uses.begin(), descriptor_uses.end(),
                       [](const PipelineStageState::DescriptorUse &use) { return use.second.is_atomic_operation; });
}

static bool WrotePrimitiveShadingRate(VkShaderStageFlagBits stage_flag, spirv_inst_iter entrypoint,
                                      const SHADER_MODULE_STATE *module_state) {
    bool primitiverate_written = false;
    if (stage_flag == VK_SHADER_STAGE_VERTEX_BIT || stage_flag == VK_SHADER_STAGE_GEOMETRY_BIT ||
        stage_flag == VK_SHADER_STAGE_MESH_BIT_NV) {
        for (const auto &set : module_state->GetBuiltinDecorationList()) {
            auto insn = module_state->at(set.offset);
            if (set.builtin == spv::BuiltInPrimitiveShadingRateKHR) {
                primitiverate_written = module_state->IsBuiltInWritten(insn, entrypoint);
            }
            if (primitiverate_written) {
                break;
            }
        }
    }
    return primitiverate_written;
}

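// PipelineStageState caches reflection data for one shader stage (entrypoint, accessible IDs, descriptor uses,
// and a few derived flags) so that later validation does not have to re-walk the SPIR-V for these queries.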
PipelineStageState::PipelineStageState(const safe_VkPipelineShaderStageCreateInfo *stage,
                                       std::shared_ptr<const SHADER_MODULE_STATE> &module_state)
    : module_state(module_state),
      create_info(stage),
      stage_flag(stage->stage),
      entrypoint(module_state->FindEntrypoint(stage->pName, stage->stage)),
      accessible_ids(module_state->MarkAccessibleIds(entrypoint)),
      descriptor_uses(module_state->CollectInterfaceByDescriptorSlot(accessible_ids)),
      has_writable_descriptor(HasWriteableDescriptor(descriptor_uses)),
      has_atomic_descriptor(HasAtomicDescriptor(descriptor_uses)),
      wrote_primitive_shading_rate(WrotePrimitiveShadingRate(stage_flag, entrypoint, module_state.get())),
      writes_to_gl_layer(module_state->WritesToGlLayer()),
      has_input_attachment_capability(module_state->HasInputAttachmentCapability()) {}

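// Builds the per-stage state for a pipeline. Stage bits are visited in order so the resulting vector has a stable
// ordering. For graphics pipeline libraries, a stage that is not present in this pipeline's own pStages array may
// instead be pulled from a linked library's pre-raster or fragment-shader sub-state.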
// static
PIPELINE_STATE::StageStateVec PIPELINE_STATE::GetStageStates(const ValidationStateTracker &state_data,
                                                             const PIPELINE_STATE &pipe_state) {
    PIPELINE_STATE::StageStateVec stage_states;
    // shader stages need to be recorded in pipeline order
    const auto stages = pipe_state.GetShaderStages();

    for (uint32_t stage_idx = 0; stage_idx < 32; ++stage_idx) {
        bool stage_found = false;
        const auto stage = static_cast<VkShaderStageFlagBits>(1 << stage_idx);
        for (const auto &shader_stage : stages) {
            if (shader_stage.stage == stage) {
                auto module = state_data.Get<SHADER_MODULE_STATE>(shader_stage.module);
                if (!module) {
                    // If module is null and there is a VkShaderModuleCreateInfo in the pNext chain of the stage info, then this
                    // module is part of a library and the state must be created
                    const auto shader_ci = LvlFindInChain<VkShaderModuleCreateInfo>(shader_stage.pNext);
                    const uint32_t unique_shader_id = 0;
                    if (shader_ci) {
                        // TODO GPU-AV rework required to get this value properly
                        module = state_data.CreateShaderModuleState(*shader_ci, unique_shader_id);
                    } else {
                        // shader_module_identifier could legally provide a null module handle
                        VkShaderModuleCreateInfo dummy_module_ci = LvlInitStruct<VkShaderModuleCreateInfo>();
                        dummy_module_ci.pCode = &unique_shader_id;  // Ensure tripping invalid spirv
                        module = state_data.CreateShaderModuleState(dummy_module_ci, unique_shader_id);
                    }
                }
                stage_states.emplace_back(&shader_stage, module);
                stage_found = true;
            }
        }
        if (!stage_found) {
            // Check if stage has been supplied by a library
            switch (stage) {
                case VK_SHADER_STAGE_VERTEX_BIT:
                    if (pipe_state.pre_raster_state && pipe_state.pre_raster_state->vertex_shader) {
                        stage_states.emplace_back(pipe_state.pre_raster_state->vertex_shader_ci,
                                                  pipe_state.pre_raster_state->vertex_shader);
                    }
                    break;
                case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
                    if (pipe_state.pre_raster_state && pipe_state.pre_raster_state->tessc_shader) {
                        stage_states.emplace_back(pipe_state.pre_raster_state->tessc_shader_ci,
                                                  pipe_state.pre_raster_state->tessc_shader);
                    }
                    break;
                case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
                    if (pipe_state.pre_raster_state && pipe_state.pre_raster_state->tesse_shader) {
                        stage_states.emplace_back(pipe_state.pre_raster_state->tesse_shader_ci,
                                                  pipe_state.pre_raster_state->tesse_shader);
                    }
                    break;
                case VK_SHADER_STAGE_GEOMETRY_BIT:
                    if (pipe_state.pre_raster_state && pipe_state.pre_raster_state->geometry_shader) {
                        stage_states.emplace_back(pipe_state.pre_raster_state->geometry_shader_ci,
                                                  pipe_state.pre_raster_state->geometry_shader);
                    }
                    break;
                case VK_SHADER_STAGE_FRAGMENT_BIT:
                    if (pipe_state.fragment_shader_state && pipe_state.fragment_shader_state->fragment_shader) {
                        stage_states.emplace_back(pipe_state.fragment_shader_state->fragment_shader_ci.get(),
                                                  pipe_state.fragment_shader_state->fragment_shader);
                    }
                    break;
                default:
                    // no-op
                    break;
            }
        }
    }
    return stage_states;
}

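// Flattens the descriptor uses from every stage into a per-set, per-binding map of requirements for the whole
// pipeline, accumulating writability, requirement flags, and the samplers used with each image binding.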
// static
PIPELINE_STATE::ActiveSlotMap PIPELINE_STATE::GetActiveSlots(const StageStateVec &stage_states) {
    PIPELINE_STATE::ActiveSlotMap active_slots;
    for (const auto &stage : stage_states) {
        if (stage.entrypoint == stage.module_state->end()) {
            continue;
        }
        // Capture descriptor uses for the pipeline
        for (const auto &use : stage.descriptor_uses) {
            // While validating shaders capture which slots are used by the pipeline
            auto &entry = active_slots[use.first.set][use.first.binding];
            entry.is_writable |= use.second.is_writable;

            auto &reqs = entry.reqs;
            reqs |= stage.module_state->DescriptorTypeToReqs(use.second.type_id);
            if (use.second.is_atomic_operation) reqs |= DESCRIPTOR_REQ_VIEW_ATOMIC_OPERATION;
            if (use.second.is_sampler_implicitLod_dref_proj) reqs |= DESCRIPTOR_REQ_SAMPLER_IMPLICITLOD_DREF_PROJ;
            if (use.second.is_sampler_bias_offset) reqs |= DESCRIPTOR_REQ_SAMPLER_BIAS_OFFSET;
            if (use.second.is_read_without_format) reqs |= DESCRIPTOR_REQ_IMAGE_READ_WITHOUT_FORMAT;
            if (use.second.is_write_without_format) reqs |= DESCRIPTOR_REQ_IMAGE_WRITE_WITHOUT_FORMAT;
            if (use.second.is_dref_operation) reqs |= DESCRIPTOR_REQ_IMAGE_DREF;

            if (use.second.samplers_used_by_image.size()) {
                if (use.second.samplers_used_by_image.size() > entry.samplers_used_by_image.size()) {
                    entry.samplers_used_by_image.resize(use.second.samplers_used_by_image.size());
                }
                uint32_t image_index = 0;
                for (const auto &samplers : use.second.samplers_used_by_image) {
                    for (const auto &sampler : samplers) {
                        entry.samplers_used_by_image[image_index].emplace(sampler);
                    }
                    ++image_index;
                }
            }
        }
    }
    return active_slots;
}

static uint32_t GetMaxActiveSlot(const PIPELINE_STATE::ActiveSlotMap &active_slots) {
    uint32_t max_active_slot = 0;
    for (const auto &entry : active_slots) {
        max_active_slot = std::max(max_active_slot, entry.first);
    }
    return max_active_slot;
}

static uint32_t GetActiveShaders(const PIPELINE_STATE::StageStateVec &stages) {
    uint32_t result = 0;
    for (const auto &stage : stages) {
        result |= stage.stage_flag;
    }
    return result;
}

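// Returns the set of output locations written by the fragment shader's entrypoint (empty if there is no fragment stage).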
static layer_data::unordered_set<uint32_t> GetFSOutputLocations(const PIPELINE_STATE::StageStateVec &stage_states) {
    layer_data::unordered_set<uint32_t> result;
    for (const auto &stage : stage_states) {
        if (stage.entrypoint == stage.module_state->end()) {
            continue;
        }
        if (stage.stage_flag == VK_SHADER_STAGE_FRAGMENT_BIT) {
            result = stage.module_state->CollectWritableOutputLocationinFS(stage.entrypoint);
            break;
        }
    }
    return result;
}

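// Determines the primitive topology that reaches the rasterizer: it starts with the input assembly topology and
// lets any later stage that declares its own output topology (e.g. tessellation or geometry) override it.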
static VkPrimitiveTopology GetTopologyAtRasterizer(const PIPELINE_STATE::StageStateVec &stage_states,
                                                   const safe_VkPipelineInputAssemblyStateCreateInfo *assembly_state) {
    VkPrimitiveTopology result = assembly_state ? assembly_state->topology : static_cast<VkPrimitiveTopology>(0);
    for (const auto &stage : stage_states) {
        if (stage.entrypoint == stage.module_state->end()) {
            continue;
        }
        auto stage_topo = stage.module_state->GetTopology(stage.entrypoint);
        if (stage_topo) {
            result = *stage_topo;
        }
    }
    return result;
}

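// The four Create*State() factories below follow the same VK_EXT_graphics_pipeline_library pattern: if this
// pipeline itself provides the corresponding library flag, build the sub-state from this create info; otherwise,
// if a VkPipelineLibraryCreateInfoKHR is chained, import the sub-state from a linked library; otherwise, a
// complete (non-library) pipeline builds the sub-state directly.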
// static
std::shared_ptr<VertexInputState> PIPELINE_STATE::CreateVertexInputState(const PIPELINE_STATE &p,
                                                                         const ValidationStateTracker &state,
                                                                         const safe_VkGraphicsPipelineCreateInfo &create_info) {
    const auto lib_type = GetGraphicsLibType(create_info);
    if (lib_type & VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT) {  // Vertex input graphics library
        return std::make_shared<VertexInputState>(p, create_info);
    }

    const auto link_info = LvlFindInChain<VkPipelineLibraryCreateInfoKHR>(create_info.pNext);
    if (link_info) {
        auto ss = GetLibSubState<VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT>(state, *link_info);
        if (ss) {
            return ss;
        }
    } else {
        if (lib_type == static_cast<VkGraphicsPipelineLibraryFlagsEXT>(0)) {  // Not a graphics library
            return std::make_shared<VertexInputState>(p, create_info);
        }
    }

    // We shouldn't get here...
    return {};
}

// static
std::shared_ptr<PreRasterState> PIPELINE_STATE::CreatePreRasterState(const PIPELINE_STATE &p, const ValidationStateTracker &state,
                                                                     const safe_VkGraphicsPipelineCreateInfo &create_info,
                                                                     std::shared_ptr<const RENDER_PASS_STATE> rp) {
    const auto lib_type = GetGraphicsLibType(create_info);
    if (lib_type & VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT) {  // Pre-raster graphics library
        return std::make_shared<PreRasterState>(p, state, create_info, rp);
    }

    const auto link_info = LvlFindInChain<VkPipelineLibraryCreateInfoKHR>(create_info.pNext);
    if (link_info) {
        auto ss = GetLibSubState<VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT>(state, *link_info);
        if (ss) {
            return ss;
        }
    } else {
        if (lib_type == static_cast<VkGraphicsPipelineLibraryFlagsEXT>(0)) {  // Not a graphics library
            return std::make_shared<PreRasterState>(p, state, create_info, rp);
        }
    }

    // We shouldn't get here...
    return {};
}

// static
std::shared_ptr<FragmentShaderState> PIPELINE_STATE::CreateFragmentShaderState(
    const PIPELINE_STATE &p, const ValidationStateTracker &state, const VkGraphicsPipelineCreateInfo &create_info,
    const safe_VkGraphicsPipelineCreateInfo &safe_create_info, std::shared_ptr<const RENDER_PASS_STATE> rp) {
    const auto lib_type = GetGraphicsLibType(create_info);
    if (lib_type & VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT) {  // Fragment shader graphics library
        return std::make_shared<FragmentShaderState>(p, state, create_info, rp);
    }

    const auto link_info = LvlFindInChain<VkPipelineLibraryCreateInfoKHR>(create_info.pNext);
    if (link_info) {
        auto ss = GetLibSubState<VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT>(state, *link_info);
        if (ss) {
            return ss;
        }
    } else {
        if (lib_type == static_cast<VkGraphicsPipelineLibraryFlagsEXT>(0)) {  // Not a graphics library
            return std::make_shared<FragmentShaderState>(p, state, safe_create_info, rp);
        }
    }

    // We shouldn't get here...
    return {};
}

// static
// Pointers that should be ignored have been set to null in safe_create_info, but if this is a graphics library we need the "raw"
// create_info.
std::shared_ptr<FragmentOutputState> PIPELINE_STATE::CreateFragmentOutputState(
    const PIPELINE_STATE &p, const ValidationStateTracker &state, const VkGraphicsPipelineCreateInfo &create_info,
    const safe_VkGraphicsPipelineCreateInfo &safe_create_info, std::shared_ptr<const RENDER_PASS_STATE> rp) {
    const auto lib_type = GetGraphicsLibType(create_info);
    if (lib_type & VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT) {  // Fragment output graphics library
        return std::make_shared<FragmentOutputState>(p, create_info, rp);
    }

    const auto link_info = LvlFindInChain<VkPipelineLibraryCreateInfoKHR>(create_info.pNext);
    if (link_info) {
        auto ss = GetLibSubState<VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT>(state, *link_info);
        if (ss) {
            return ss;
        }
    } else {
        if (lib_type == static_cast<VkGraphicsPipelineLibraryFlagsEXT>(0)) {  // Not a graphics library
            return std::make_shared<FragmentOutputState>(p, safe_create_info, rp);
        }
    }

    // We shouldn't get here...
    return {};
}

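// Merges the dynamic state declared by a linked library sub-state into the accumulated dynamic state list,
// de-duplicating states that are already present and OR-ing in the sub-state's create flags.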
template <typename Substate>
void AppendDynamicStateFromSubstate(const Substate &substate, std::vector<VkDynamicState> &dyn_states,
                                    VkPipelineDynamicStateCreateFlags &flags) {
    if (substate) {
        const auto *dyn_state = substate->parent.DynamicState();
        if (dyn_state) {
            flags |= dyn_state->flags;
            for (uint32_t i = 0; i < dyn_state->dynamicStateCount; ++i) {
                const auto itr = std::find(dyn_states.cbegin(), dyn_states.cend(), dyn_state->pDynamicStates[i]);
                if (itr == dyn_states.cend()) {
                    dyn_states.emplace_back(dyn_state->pDynamicStates[i]);
                }
            }
        }
    }
}

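// Returns the pipeline layout(s) this pipeline was built against: the distinct pre-raster and fragment-shader
// layouts when the pipeline was linked from libraries with different layouts, otherwise a single layout.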
std::vector<std::shared_ptr<const PIPELINE_LAYOUT_STATE>> PIPELINE_STATE::PipelineLayoutStateUnion() const {
    std::vector<std::shared_ptr<const PIPELINE_LAYOUT_STATE>> ret;
    ret.reserve(2);
    // Only need to check pre-raster _or_ fragment shader layout; if either one is not merged_graphics_layout, then
    // merged_graphics_layout is a union
    if (pre_raster_state) {
        if (pre_raster_state->pipeline_layout != fragment_shader_state->pipeline_layout) {
            return {pre_raster_state->pipeline_layout, fragment_shader_state->pipeline_layout};
        } else {
            return {pre_raster_state->pipeline_layout};
        }
    }
    return {merged_graphics_layout};
}

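// create_info holds one of the graphics/compute/ray-tracing create structs, so accessors for fields common to all
// of them are provided as explicit specializations per create-info type, each asserting the expected sType.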
template <>
VkPipeline PIPELINE_STATE::BasePipeline<VkGraphicsPipelineCreateInfo>() const {
    assert(create_info.graphics.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
    return create_info.graphics.basePipelineHandle;
}
template <>
VkPipeline PIPELINE_STATE::BasePipeline<VkComputePipelineCreateInfo>() const {
    assert(create_info.compute.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO);
    return create_info.compute.basePipelineHandle;
}
template <>
VkPipeline PIPELINE_STATE::BasePipeline<VkRayTracingPipelineCreateInfoKHR>() const {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR);
    return create_info.raytracing.basePipelineHandle;
}
template <>
VkPipeline PIPELINE_STATE::BasePipeline<VkRayTracingPipelineCreateInfoNV>() const {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV);
    return create_info.raytracing.basePipelineHandle;
}

template <>
int32_t PIPELINE_STATE::BasePipelineIndex<VkGraphicsPipelineCreateInfo>() const {
    assert(create_info.graphics.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
    return create_info.graphics.basePipelineIndex;
}
template <>
int32_t PIPELINE_STATE::BasePipelineIndex<VkComputePipelineCreateInfo>() const {
    assert(create_info.compute.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO);
    return create_info.compute.basePipelineIndex;
}
template <>
int32_t PIPELINE_STATE::BasePipelineIndex<VkRayTracingPipelineCreateInfoKHR>() const {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR);
    return create_info.raytracing.basePipelineIndex;
}
template <>
int32_t PIPELINE_STATE::BasePipelineIndex<VkRayTracingPipelineCreateInfoNV>() const {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV);
    return create_info.raytracing.basePipelineIndex;
}

template <>
VkShaderModule PIPELINE_STATE::GetShaderModuleByCIIndex<VkGraphicsPipelineCreateInfo>(uint32_t i) {
    assert(create_info.graphics.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
    return create_info.graphics.pStages[i].module;
}
template <>
VkShaderModule PIPELINE_STATE::GetShaderModuleByCIIndex<VkComputePipelineCreateInfo>(uint32_t) {
    assert(create_info.compute.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO);
    return create_info.compute.stage.module;
}
template <>
VkShaderModule PIPELINE_STATE::GetShaderModuleByCIIndex<VkRayTracingPipelineCreateInfoKHR>(uint32_t i) {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR);
    return create_info.raytracing.pStages[i].module;
}
template <>
VkShaderModule PIPELINE_STATE::GetShaderModuleByCIIndex<VkRayTracingPipelineCreateInfoNV>(uint32_t i) {
    assert(create_info.raytracing.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV);
    return create_info.raytracing.pStages[i].module;
}

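// Graphics pipeline constructor. The sub-states (vertex input, pre-raster, fragment shader, fragment output) are
// created up front; if this pipeline links graphics libraries, dynamic state and pipeline layouts from the linked
// libraries are then merged into this pipeline's state.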
PIPELINE_STATE::PIPELINE_STATE(const ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
                               std::shared_ptr<const RENDER_PASS_STATE> &&rpstate,
                               std::shared_ptr<const PIPELINE_LAYOUT_STATE> &&layout)
    : BASE_NODE(static_cast<VkPipeline>(VK_NULL_HANDLE), kVulkanObjectTypePipeline),
      rp_state(rpstate),
      create_info(pCreateInfo, rpstate),
      graphics_lib_type(GetGraphicsLibType(create_info.graphics)),
      vertex_input_state(CreateVertexInputState(*this, *state_data, create_info.graphics)),
      pre_raster_state(CreatePreRasterState(*this, *state_data, create_info.graphics, rpstate)),
      fragment_shader_state(CreateFragmentShaderState(*this, *state_data, *pCreateInfo, create_info.graphics, rpstate)),
      fragment_output_state(CreateFragmentOutputState(*this, *state_data, *pCreateInfo, create_info.graphics, rpstate)),
      rendering_create_info(LvlFindInChain<VkPipelineRenderingCreateInfo>(PNext())),
      stage_state(GetStageStates(*state_data, *this)),
      fragmentShader_writable_output_location_list(GetFSOutputLocations(stage_state)),
      active_slots(GetActiveSlots(stage_state)),
      max_active_slot(GetMaxActiveSlot(active_slots)),
      active_shaders(GetActiveShaders(stage_state)),
      topology_at_rasterizer(GetTopologyAtRasterizer(stage_state, create_info.graphics.pInputAssemblyState)) {
    const auto link_info = LvlFindInChain<VkPipelineLibraryCreateInfoKHR>(PNext());
    if (link_info) {
        // accumulate dynamic state
        // TODO is this correct?
        auto *dyn_state_ci = const_cast<safe_VkPipelineDynamicStateCreateInfo *>(create_info.graphics.pDynamicState);
        std::vector<VkDynamicState> dyn_states;
        VkPipelineDynamicStateCreateFlags dyn_flags = 0;
        if (create_info.graphics.pDynamicState) {
            std::copy(dyn_state_ci->pDynamicStates, dyn_state_ci->pDynamicStates + dyn_state_ci->dynamicStateCount,
                      std::back_inserter(dyn_states));
            dyn_flags = dyn_state_ci->flags;
        }
        AppendDynamicStateFromSubstate(vertex_input_state, dyn_states, dyn_flags);
        AppendDynamicStateFromSubstate(pre_raster_state, dyn_states, dyn_flags);
        AppendDynamicStateFromSubstate(fragment_shader_state, dyn_states, dyn_flags);
        AppendDynamicStateFromSubstate(fragment_output_state, dyn_states, dyn_flags);
        if (dyn_states.size() > 0) {
            // We have dynamic state
            if (!dyn_state_ci || (dyn_state_ci->dynamicStateCount < dyn_states.size())) {
                // There is dynamic state defined in libraries that is not included in this pipeline's create info
                if (!dyn_state_ci) {
                    // *All* dynamic state defined is coming from graphics libraries
                    // NOTE: heap allocation cleaned up in ~safe_VkGraphicsPipelineCreateInfo
                    dyn_state_ci = new safe_VkPipelineDynamicStateCreateInfo;
                    const_cast<safe_VkGraphicsPipelineCreateInfo *>(&create_info.graphics)->pDynamicState = dyn_state_ci;
                }
                dyn_state_ci->flags = dyn_flags;
                dyn_state_ci->dynamicStateCount = static_cast<uint32_t>(dyn_states.size());
                // NOTE: heap allocation cleaned up in ~safe_VkPipelineDynamicStateCreateInfo
                dyn_state_ci->pDynamicStates = new VkDynamicState[dyn_states.size()];
                std::copy(&dyn_states.front(), &dyn_states.front() + dyn_states.size(),
                          const_cast<VkDynamicState *>(dyn_state_ci->pDynamicStates));
            }
        }

        const auto &exe_layout_state = state_data->Get<PIPELINE_LAYOUT_STATE>(create_info.graphics.layout);
        const auto *exe_layout = exe_layout_state.get();
        const auto *pre_raster_layout =
            (pre_raster_state && pre_raster_state->pipeline_layout) ? pre_raster_state->pipeline_layout.get() : nullptr;
        const auto *fragment_shader_layout = (fragment_shader_state && fragment_shader_state->pipeline_layout)
                                                 ? fragment_shader_state->pipeline_layout.get()
                                                 : nullptr;
        std::array<decltype(exe_layout), 3> layouts;
        layouts[0] = exe_layout;
        layouts[1] = fragment_shader_layout;
        layouts[2] = pre_raster_layout;
        merged_graphics_layout = std::make_shared<PIPELINE_LAYOUT_STATE>(layouts);

        // TODO Could store the graphics_lib_type in the sub-state rather than searching for it again here.
        // Or, could store a pointer back to the owning PIPELINE_STATE.
        for (uint32_t i = 0; i < link_info->libraryCount; ++i) {
            const auto &state = state_data->Get<PIPELINE_STATE>(link_info->pLibraries[i]);
            if (state) {
                graphics_lib_type |= state->graphics_lib_type;
            }
        }
    }
}

PIPELINE_STATE::PIPELINE_STATE(const ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo,
                               std::shared_ptr<const PIPELINE_LAYOUT_STATE> &&layout)
    : BASE_NODE(static_cast<VkPipeline>(VK_NULL_HANDLE), kVulkanObjectTypePipeline),
      create_info(pCreateInfo),
      stage_state(GetStageStates(*state_data, *this)),
      active_slots(GetActiveSlots(stage_state)),
      active_shaders(GetActiveShaders(stage_state)),
      topology_at_rasterizer{},
      merged_graphics_layout(layout) {
    assert(active_shaders == VK_SHADER_STAGE_COMPUTE_BIT);
}

PIPELINE_STATE::PIPELINE_STATE(const ValidationStateTracker *state_data, const VkRayTracingPipelineCreateInfoKHR *pCreateInfo,
                               std::shared_ptr<const PIPELINE_LAYOUT_STATE> &&layout)
    : BASE_NODE(static_cast<VkPipeline>(VK_NULL_HANDLE), kVulkanObjectTypePipeline),
      create_info(pCreateInfo),
      stage_state(GetStageStates(*state_data, *this)),
      active_slots(GetActiveSlots(stage_state)),
      active_shaders(GetActiveShaders(stage_state)),
      topology_at_rasterizer{},
      merged_graphics_layout(std::move(layout)) {
    assert(0 == (active_shaders &
                 ~(VK_SHADER_STAGE_RAYGEN_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR |
                   VK_SHADER_STAGE_MISS_BIT_KHR | VK_SHADER_STAGE_INTERSECTION_BIT_KHR | VK_SHADER_STAGE_CALLABLE_BIT_KHR)));
}

PIPELINE_STATE::PIPELINE_STATE(const ValidationStateTracker *state_data, const VkRayTracingPipelineCreateInfoNV *pCreateInfo,
                               std::shared_ptr<const PIPELINE_LAYOUT_STATE> &&layout)
    : BASE_NODE(static_cast<VkPipeline>(VK_NULL_HANDLE), kVulkanObjectTypePipeline),
      create_info(pCreateInfo),
      stage_state(GetStageStates(*state_data, *this)),
      active_slots(GetActiveSlots(stage_state)),
      active_shaders(GetActiveShaders(stage_state)),
      topology_at_rasterizer{},
      merged_graphics_layout(std::move(layout)) {
    assert(0 == (active_shaders &
                 ~(VK_SHADER_STAGE_RAYGEN_BIT_KHR | VK_SHADER_STAGE_ANY_HIT_BIT_KHR | VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR |
                   VK_SHADER_STAGE_MISS_BIT_KHR | VK_SHADER_STAGE_INTERSECTION_BIT_KHR | VK_SHADER_STAGE_CALLABLE_BIT_KHR)));
}

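// Replaces the command buffer's currently bound push descriptor set with a new one: the old set is unbound from
// every per-set slot that referenced it and removed as a child of the command buffer, then the new set is
// registered as a child and stored.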
void LAST_BOUND_STATE::UnbindAndResetPushDescriptorSet(std::shared_ptr<cvdescriptorset::DescriptorSet> &&ds) {
    if (push_descriptor_set) {
        for (auto &ps : per_set) {
            if (ps.bound_descriptor_set == push_descriptor_set) {
                cb_state.RemoveChild(ps.bound_descriptor_set);
                ps.bound_descriptor_set.reset();
            }
        }
    }
    cb_state.AddChild(ds);
    push_descriptor_set = std::move(ds);
}

void LAST_BOUND_STATE::Reset() {
    pipeline_state = nullptr;
    pipeline_layout = VK_NULL_HANDLE;
    if (push_descriptor_set) {
        cb_state.RemoveChild(push_descriptor_set);
        push_descriptor_set->Destroy();
    }
    push_descriptor_set.reset();
    per_set.clear();
}