/* Copyright (c) 2019-2022 The Khronos Group Inc.
 * Copyright (c) 2019-2022 Valve Corporation
 * Copyright (c) 2019-2022 LunarG, Inc.
 * Copyright (C) 2019-2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * John Zulauf <jzulauf@lunarg.com>
 *
 */
#include <cassert>
#include "subresource_adapter.h"
#include "vk_format_utils.h"
#include <cmath>
#include "image_state.h"
#include "layer_chassis_dispatch.h"

namespace subresource_adapter {
Subresource::Subresource(const RangeEncoder& encoder, const VkImageSubresource& subres)
    : VkImageSubresource({0, subres.mipLevel, subres.arrayLayer}), aspect_index() {
    aspect_index = encoder.LowerBoundFromMask(subres.aspectMask);
    aspectMask = encoder.AspectBit(aspect_index);
}

IndexType RangeEncoder::Encode1AspectArrayOnly(const Subresource& pos) const { return pos.arrayLayer; }
IndexType RangeEncoder::Encode1AspectMipArray(const Subresource& pos) const { return pos.arrayLayer + pos.mipLevel * mip_size_; }
IndexType RangeEncoder::Encode1AspectMipOnly(const Subresource& pos) const { return pos.mipLevel; }

IndexType RangeEncoder::EncodeAspectArrayOnly(const Subresource& pos) const {
    return pos.arrayLayer + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipArray(const Subresource& pos) const {
    return pos.arrayLayer + pos.mipLevel * mip_size_ + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipOnly(const Subresource& pos) const { return pos.mipLevel + aspect_base_[pos.aspect_index]; }

uint32_t RangeEncoder::LowerBoundImpl1(VkImageAspectFlags aspect_mask) const {
    assert(aspect_mask & aspect_bits_[0]);
    return 0;
}
uint32_t RangeEncoder::LowerBoundWithStartImpl1(VkImageAspectFlags aspect_mask, uint32_t start) const {
    assert(start == 0);
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    }
    return limits_.aspect_index;
}

uint32_t RangeEncoder::LowerBoundImpl2(VkImageAspectFlags aspect_mask) const {
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    }
    assert(aspect_mask & aspect_bits_[1]);
    return 1;
}
uint32_t RangeEncoder::LowerBoundWithStartImpl2(VkImageAspectFlags aspect_mask, uint32_t start) const {
    switch (start) {
        case 0:
            if (aspect_mask & aspect_bits_[0]) {
                return 0;
            }
            // no break
        case 1:
            if (aspect_mask & aspect_bits_[1]) {
                return 1;
            }
            break;
        default:
            break;
    }
    return limits_.aspect_index;
}

uint32_t RangeEncoder::LowerBoundImpl3(VkImageAspectFlags aspect_mask) const {
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    } else if (aspect_mask & aspect_bits_[1]) {
        return 1;
    } else {
        assert(aspect_mask & aspect_bits_[2]);
        return 2;
    }
}

uint32_t RangeEncoder::LowerBoundWithStartImpl3(VkImageAspectFlags aspect_mask, uint32_t start) const {
    switch (start) {
        case 0:
            if (aspect_mask & aspect_bits_[0]) {
                return 0;
            }
            // no break
        case 1:
            if (aspect_mask & aspect_bits_[1]) {
                return 1;
            }
            // no break
        case 2:
            if (aspect_mask & aspect_bits_[2]) {
                return 2;
            }
            break;
        default:
            break;
    }
    return limits_.aspect_index;
}

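// For example, a depth/stencil encoder has aspect_bits_ = {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}, so
// LowerBoundFromMask(VK_IMAGE_ASPECT_STENCIL_BIT) selects index 1; when no bit at or after the start index matches, the
// implementations above return limits_.aspect_index as the "not found" sentinel the generators test against.
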
void RangeEncoder::PopulateFunctionPointers() {
    // Select the encode/decode specialists
    if (limits_.aspect_index == 1) {
        // Single-aspect images use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::Encode1AspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<1>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::Encode1AspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<1>;
        } else {
            encode_function_ = &RangeEncoder::Encode1AspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<1>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl1;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl1;
    } else if (limits_.aspect_index == 2) {
        // Two-aspect images use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::EncodeAspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<2>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::EncodeAspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<2>;
        } else {
            encode_function_ = &RangeEncoder::EncodeAspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<2>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl2;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl2;
    } else {
        encode_function_ = &RangeEncoder::EncodeAspectMipArray;
        decode_function_ = &RangeEncoder::DecodeAspectMipArray<3>;
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl3;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl3;
    }

    // Initialize the offset array
    aspect_base_[0] = 0;
    for (uint32_t i = 1; i < limits_.aspect_index; ++i) {
        aspect_base_[i] = aspect_base_[i - 1] + aspect_size_;
    }
}

RangeEncoder::RangeEncoder(const VkImageSubresourceRange& full_range, const AspectParameters* param)
    : limits_(param->AspectMask(), full_range.levelCount, full_range.layerCount, param->AspectCount()),
      full_range_(full_range),
      mip_size_(full_range.layerCount),
      aspect_size_(mip_size_ * full_range.levelCount),
      aspect_bits_(param->AspectBits()),
      mask_index_function_(param->MaskToIndexFunction()),
      encode_function_(nullptr),
      decode_function_(nullptr) {
    // Only valid to create an encoder for a *whole* image (i.e. the bases must be zero, and the specified aspect mask *must*
    // equal the traits aspect mask). Encoder ranges assume zero bases.
    assert(full_range.aspectMask == limits_.aspectMask);
    assert(full_range.baseArrayLayer == 0);
    assert(full_range.baseMipLevel == 0);
    // TODO: should be some static assert
    assert(param->AspectCount() <= kMaxSupportedAspect);
    PopulateFunctionPointers();
}

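// Construction sketch (illustrative): a RangeEncoder is always built from an image's canonical full range, e.g.
//     const auto* param = AspectParameters::Get(full_range.aspectMask);
//     RangeEncoder encoder(full_range, param);
// which is exactly what the ImageRangeEncoder constructors below do with image.full_range.
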
#ifndef NDEBUG
static bool IsValid(const RangeEncoder& encoder, const VkImageSubresourceRange& bounds) {
    const auto& limits = encoder.Limits();
    return (((bounds.aspectMask & limits.aspectMask) == bounds.aspectMask) &&
            (bounds.baseMipLevel + bounds.levelCount <= limits.mipLevel) &&
            (bounds.baseArrayLayer + bounds.layerCount <= limits.arrayLayer));
}
#endif

// Create an iterator-like "generator" that for each increment produces the next index range matching the
// next contiguous (in index space) section of the VkImageSubresourceRange
// Ranges will always span the layerCount layers, and if the layerCount is the full range of the image (as known by
// the encoder) will span the levelCount mip levels as well.
RangeGenerator::RangeGenerator(const RangeEncoder& encoder, const VkImageSubresourceRange& subres_range)
    : encoder_(&encoder), isr_pos_(encoder, subres_range), pos_(), aspect_base_() {
    assert((((isr_pos_.Limits()).aspectMask & (encoder.Limits()).aspectMask) == (isr_pos_.Limits()).aspectMask));
    assert((isr_pos_.Limits()).baseMipLevel + (isr_pos_.Limits()).levelCount <= (encoder.Limits()).mipLevel);
    assert((isr_pos_.Limits()).baseArrayLayer + (isr_pos_.Limits()).layerCount <= (encoder.Limits()).arrayLayer);

    // To see if we have a full range special case, need to compare the subres_range against the *encoder's* limits
    const auto& limits = encoder.Limits();
    if ((subres_range.baseArrayLayer == 0) && (subres_range.layerCount == limits.arrayLayer)) {
        if ((subres_range.baseMipLevel == 0) && (subres_range.levelCount == limits.mipLevel)) {
            if (subres_range.aspectMask == limits.aspectMask) {
                // Full range
                pos_.begin = 0;
                pos_.end = encoder.AspectSize() * limits.aspect_index;
                aspect_count_ = 1;  // Flag this to never advance aspects.
            } else {
                // All mips, all layers, but not all aspects
                pos_.begin = encoder.AspectBase(isr_pos_.aspect_index);
                pos_.end = pos_.begin + encoder.AspectSize();
                aspect_count_ = limits.aspect_index;
            }
        } else {
            // All array layers, but not all levels
            pos_.begin = encoder.AspectBase(isr_pos_.aspect_index) + subres_range.baseMipLevel * encoder.MipSize();
            pos_.end = pos_.begin + subres_range.levelCount * encoder.MipSize();
            aspect_count_ = limits.aspect_index;
        }

        // Full set of array layers at a time, thus we can span across all selected mip levels
        mip_count_ = 1;  // we don't ever advance across mips, as we do all of them in one range
    } else {
        // Each range covers all included array_layers for each selected mip_level for each given selected aspect
        // so we'll use the general purpose encode and smallest range size
        pos_.begin = encoder.Encode(isr_pos_);
        pos_.end = pos_.begin + subres_range.layerCount;

        // we do have to traverse across mips, though (other than the Encode above) we don't have to know which one we are on.
        mip_count_ = subres_range.levelCount;
        aspect_count_ = limits.aspect_index;
    }

    // To get to the next aspect range we offset from the last base
    aspect_base_ = pos_;
    mip_index_ = 0;
    aspect_index_ = isr_pos_.aspect_index;
}

RangeGenerator& RangeGenerator::operator++() {
    mip_index_++;
    // NOTE: If all selected mip levels are done at once, mip_count_ is set to one, not the number of selected mip_levels
    if (mip_index_ >= mip_count_) {
        const auto last_aspect_index = aspect_index_;
        // Seek the next valid aspect (if any)
        aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask, aspect_index_ + 1);
        if (aspect_index_ < aspect_count_) {
            // Force isr_pos to the beginning of this found aspect
            isr_pos_.SeekAspect(aspect_index_);
            // SubresourceGenerator should never be at tombstones when we aren't
            assert(isr_pos_.aspectMask != 0);

            // Offset by the distance between the last start of aspect and *this* start of aspect
            aspect_base_ += (encoder_->AspectBase(isr_pos_.aspect_index) - encoder_->AspectBase(last_aspect_index));
            pos_ = aspect_base_;
            mip_index_ = 0;
        } else {
            // Tombstone both index range and subresource positions to the "at end" convention
            pos_ = {0, 0};
            isr_pos_.aspectMask = 0;
        }
    } else {
        // Note: for the layerCount < full_range.layerCount case, because the generated ranges per mip_level are discontinuous
        // we have to emit each mip level's range individually
        pos_ += encoder_->MipSize();
        isr_pos_.SeekMip(isr_pos_.Limits().baseMipLevel + mip_index_);
    }
    return *this;
}
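
// Usage sketch (illustrative only; see subresource_adapter.h for the exact accessors):
//     RangeGenerator gen(encoder, subres_range);
//     for (; gen->non_empty(); ++gen) {
//         const IndexRange& range = *gen;  // contiguous [begin, end) span of encoded subresource indices
//     }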

ImageRangeEncoder::ImageRangeEncoder(const IMAGE_STATE& image)
    : ImageRangeEncoder(image, AspectParameters::Get(image.full_range.aspectMask)) {}

ImageRangeEncoder::ImageRangeEncoder(const IMAGE_STATE& image, const AspectParameters* param)
    : RangeEncoder(image.full_range, param), image_(&image), total_size_(0U) {
    if (image_->createInfo.extent.depth > 1) {
        limits_.arrayLayer = image_->createInfo.extent.depth;
    }
    VkSubresourceLayout layout = {};
    VkImageSubresource subres = {};
    VkImageSubresourceLayers subres_layers = {limits_.aspectMask, 0, 0, limits_.arrayLayer};
    linear_image_ = false;

    // WORKAROUND for dev_sim and mock_icd not containing valid VkSubresourceLayout yet. Treat it as an optimally tiled image.
    if (image_->createInfo.tiling == VK_IMAGE_TILING_LINEAR) {
        subres = {static_cast<VkImageAspectFlags>(AspectBit(0)), 0, 0};
        DispatchGetImageSubresourceLayout(image_->store_device_as_workaround, image_->image(), &subres, &layout);
        if (layout.size > 0) {
            linear_image_ = true;
        }
    }

    is_compressed_ = FormatIsCompressed(image.createInfo.format);
    texel_extent_ = FormatTexelBlockExtent(image.createInfo.format);

    is_3_d_ = image_->createInfo.imageType == VK_IMAGE_TYPE_3D;
    y_interleave_ = false;
    for (uint32_t aspect_index = 0; aspect_index < limits_.aspect_index; ++aspect_index) {
        subres.aspectMask = static_cast<VkImageAspectFlags>(AspectBit(aspect_index));
        subres_layers.aspectMask = subres.aspectMask;
        texel_sizes_.push_back(FormatTexelSize(image.createInfo.format, subres.aspectMask));
        IndexType aspect_size = 0;
        for (uint32_t mip_index = 0; mip_index < limits_.mipLevel; ++mip_index) {
            subres_layers.mipLevel = mip_index;
            subres.mipLevel = mip_index;
            auto subres_extent = image_->GetSubresourceExtent(subres_layers);

            if (linear_image_) {
                DispatchGetImageSubresourceLayout(image_->store_device_as_workaround, image_->image(), &subres, &layout);
                if (is_3_d_) {
                    if ((layout.depthPitch == 0) && (subres_extent.depth == 1)) {
                        layout.depthPitch = layout.size;  // Certain implementations don't supply pitches when size is 1
                    }
                    y_interleave_ = y_interleave_ || (layout.rowPitch > layout.depthPitch);
                } else {
                    if ((layout.arrayPitch == 0) && (limits_.arrayLayer == 1)) {
                        layout.arrayPitch = layout.size;  // Certain implementations don't supply pitches when size is 1
                    }
                    y_interleave_ = y_interleave_ || (layout.rowPitch > layout.arrayPitch);
                }
            } else {
                layout.offset += layout.size;
                layout.rowPitch = static_cast<VkDeviceSize>(floor(subres_extent.width * texel_sizes_[aspect_index]));
                layout.arrayPitch = layout.rowPitch * subres_extent.height;
                layout.depthPitch = layout.arrayPitch;
                if (is_3_d_) {
                    layout.size = layout.depthPitch * subres_extent.depth;
                } else {
                    // 2D arrays are not affected by MIP level extent reductions.
                    layout.size = layout.arrayPitch * limits_.arrayLayer;
                }
            }
            subres_info_.emplace_back(layout, subres_extent, texel_extent_, texel_sizes_[aspect_index]);
            aspect_size += layout.size;
            total_size_ += layout.size;
        }
        aspect_sizes_.emplace_back(aspect_size);
    }
}
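
// Note: the loops above fill subres_info_ (and aspect_sizes_) in aspect-major, mip-minor order; the generators below
// rely on that ordering when they advance subres_index_ by whole mip levels within a single aspect.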

IndexType ImageRangeEncoder::Encode2D(const VkSubresourceLayout& layout, uint32_t layer, uint32_t aspect_index,
                                      const VkOffset3D& offset) const {
    assert(offset.z == 0U);
    return layout.offset + layer * layout.arrayPitch + offset.y * layout.rowPitch +
           (offset.x ? static_cast<IndexType>(floor(offset.x * texel_sizes_[aspect_index])) : 0U);
}

IndexType ImageRangeEncoder::Encode3D(const VkSubresourceLayout& layout, uint32_t aspect_index, const VkOffset3D& offset) const {
    return layout.offset + offset.z * layout.depthPitch + offset.y * layout.rowPitch +
           (offset.x ? static_cast<IndexType>(floor(offset.x * texel_sizes_[aspect_index])) : 0U);
}

void ImageRangeEncoder::Decode(const VkImageSubresource& subres, const IndexType& encode, uint32_t& out_layer,
                               VkOffset3D& out_offset) const {
    uint32_t subres_index = GetSubresourceIndex(LowerBoundFromMask(subres.aspectMask), subres.mipLevel);
    const auto& subres_layout = GetSubresourceInfo(subres_index).layout;
    IndexType decode = encode - subres_layout.offset;
    out_layer = static_cast<uint32_t>(decode / subres_layout.arrayPitch);
    decode -= (out_layer * subres_layout.arrayPitch);
    out_offset.z = static_cast<int32_t>(decode / subres_layout.depthPitch);
    decode -= (out_offset.z * subres_layout.depthPitch);
    out_offset.y = static_cast<int32_t>(decode / subres_layout.rowPitch);
    decode -= (out_offset.y * subres_layout.rowPitch);
    out_offset.x = static_cast<int32_t>(static_cast<double>(decode) / texel_sizes_[LowerBoundFromMask(subres.aspectMask)]);
}
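
// Decode is the inverse of Encode2D/Encode3D for a single subresource: it peels off arrayPitch, depthPitch, and rowPitch
// multiples in turn, then converts the remaining byte offset back to a texel x coordinate using the per-aspect texel size.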

inline VkImageSubresourceRange GetRemaining(const VkImageSubresourceRange& full_range, VkImageSubresourceRange subres_range) {
    if (subres_range.levelCount == VK_REMAINING_MIP_LEVELS) {
        subres_range.levelCount = full_range.levelCount - subres_range.baseMipLevel;
    }
    if (subres_range.layerCount == VK_REMAINING_ARRAY_LAYERS) {
        subres_range.layerCount = full_range.layerCount - subres_range.baseArrayLayer;
    }
    return subres_range;
}
inline bool CoversAllLayers(const VkImageSubresourceRange& full_range, VkImageSubresourceRange subres_range) {
    return (subres_range.baseArrayLayer == 0) && (subres_range.layerCount == full_range.layerCount);
}
inline bool CoversAllLevels(const VkImageSubresourceRange& full_range, VkImageSubresourceRange subres_range) {
    return (subres_range.baseMipLevel == 0) && (subres_range.levelCount == full_range.levelCount);
}
inline bool CoversAllAspects(const VkImageSubresourceRange& full_range, VkImageSubresourceRange subres_range) {
    return full_range.aspectMask == subres_range.aspectMask;
}

static bool SubresourceRangeIsEmpty(const VkImageSubresourceRange& range) {
    return (0 == range.aspectMask) || (0 == range.levelCount) || (0 == range.layerCount);
}
static bool ExtentIsEmpty(const VkExtent3D& extent) { return (0 == extent.width) || (0 == extent.height) || (0 == extent.depth); }

void ImageRangeGenerator::SetInitialPosFullOffset(uint32_t layer, uint32_t aspect_index) {
    const bool is_3D = encoder_->Is3D();
    const auto& subres_layout = subres_info_->layout;
    const IndexType encode_base = is_3D ? encoder_->Encode3D(subres_layout, aspect_index, offset_)
                                        : encoder_->Encode2D(subres_layout, layer, aspect_index, offset_);
    const IndexType base = base_address_ + encode_base;
    // To deal with compressed formats the span must cover the y-extent of lines (something we remember in the y_step)
    const IndexType span = static_cast<IndexType>(floor(encoder_->TexelSize(aspect_index) * (extent_.width * incr_state_.y_step)));

    const uint32_t z_count = is_3D ? extent_.depth : subres_range_.layerCount;
    const IndexType z_pitch = is_3D ? subres_info_->z_step_pitch : subres_layout.arrayPitch;
    incr_state_.Set(extent_.height, z_count, base, span, subres_info_->y_step_pitch, z_pitch);
}

void ImageRangeGenerator::SetInitialPosFullWidth(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->IsInterleaveY() && (offset_.x == 0));
    const bool is_3D = encoder_->Is3D();
    const auto& subres_layout = subres_info_->layout;
    const IndexType encode_base = is_3D ? encoder_->Encode3D(subres_layout, aspect_index, offset_)
                                        : encoder_->Encode2D(subres_layout, layer, aspect_index, offset_);
    const IndexType base = base_address_ + encode_base;
    // Height must be in multiples of y_step (the texel dimension)... validated elsewhere
    const IndexType span = subres_layout.rowPitch * extent_.height;

    const uint32_t z_count = is_3D ? extent_.depth : subres_range_.layerCount;
    const IndexType z_pitch = is_3D ? subres_info_->z_step_pitch : subres_layout.arrayPitch;
    incr_state_.Set(1U, z_count, base, span, subres_info_->y_step_pitch, z_pitch);
}

void ImageRangeGenerator::SetInitialPosFullHeight(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->Is3D() && (offset_.x == 0) && (offset_.y == 0));
    const auto& subres_layout = subres_info_->layout;
    const IndexType base = base_address_ + subres_layout.offset + subres_range_.baseArrayLayer * subres_layout.arrayPitch;
    const IndexType span = subres_info_->layer_span;
    const IndexType z_step = subres_layout.arrayPitch;

    incr_state_.Set(1, subres_range_.layerCount, base, span, span, z_step);
}

void ImageRangeGenerator::SetInitialPosSomeDepth(uint32_t layer, uint32_t aspect_index) {
    assert(encoder_->Is3D() && (offset_.x == 0) && (offset_.y == 0) && (layer == 0));
    const auto& subres_layout = subres_info_->layout;
    const IndexType encode_base = encoder_->Encode3D(subres_layout, aspect_index, offset_);
    const IndexType base = base_address_ + encode_base;
    // Depth must be in multiples of z_step (the texel dimension)... validated elsewhere
    const IndexType span = subres_layout.depthPitch * extent_.depth;

    incr_state_.Set(1, 1, base, span, span, subres_layout.size);
}

void ImageRangeGenerator::SetInitialPosFullDepth(uint32_t layer, uint32_t aspect_index) {
    assert(encoder_->Is3D() && (offset_.x == 0) && (offset_.y == 0) && (offset_.z == 0) && (layer == 0));
    const auto& subres_layout = subres_info_->layout;
    const IndexType base = base_address_ + subres_layout.offset;
    const IndexType span = subres_layout.depthPitch * extent_.depth;

    incr_state_.Set(1, 1, base, span, span, span);
}

void ImageRangeGenerator::SetInitialPosSomeLayers(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->Is3D() && (offset_.x == 0) && (offset_.y == 0) && (offset_.z == 0));
    const auto& subres_layout = subres_info_->layout;
    const IndexType base = base_address_ + subres_layout.offset + layer * subres_layout.arrayPitch;
    const IndexType span = subres_layout.arrayPitch * subres_range_.layerCount;
    const IndexType z_step = subres_layout.arrayPitch * encoder_->Limits().arrayLayer;
    incr_state_.Set(1, 1, base, span, span, z_step);
}

void ImageRangeGenerator::SetInitialPosAllLayers(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->Is3D() && (offset_.x == 0) && (offset_.y == 0) && (offset_.z == 0) && (layer == 0));
    const auto& subres_layout = subres_info_->layout;
    const IndexType base = base_address_ + subres_layout.offset;
    const IndexType span = subres_layout.arrayPitch * subres_range_.layerCount;
    incr_state_.Set(1, 1, base, span, span, span);
}

void ImageRangeGenerator::SetInitialPosOneAspect(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->IsLinearImage());  // Requires the major/minor ordering of "idealized"/non-linear images
    const auto& subres_layout = subres_info_->layout;
    const IndexType base = base_address_ + subres_layout.offset;
    IndexType span = 0;
    if (subres_range_.levelCount == encoder_->Limits().mipLevel) {
        span = encoder_->GetAspectSize(aspect_index);
    } else {
        // Add up the mip sizes...
        // Assumes subres_info is pointing to index(baseMipLevel, aspect_index)
        // Assumes mip major order...
        for (uint32_t level = 0; level < subres_range_.levelCount; level++) {
            span += subres_info_[level].layout.size;
        }
    }
    incr_mip_ = subres_range_.levelCount;
    incr_state_.Set(1, 1, base, span, span, span);
}

void ImageRangeGenerator::SetInitialPosAllSubres(uint32_t layer, uint32_t aspect_index) {
    assert(!encoder_->IsLinearImage());
    const IndexType base = base_address_;
    const IndexType span = encoder_->TotalSize();

    // Just one range... everything, ++ will short circuit to "end"
    single_full_size_range_ = true;
    // We don't need to set up the rest of the incrementer, just the starting position
    incr_state_.y_base = {base, base + span};
}

bool ImageRangeGenerator::Convert2DCompatibleTo3D() {
    if (encoder_->Is3D() && is_depth_sliced_) {
        // This is only valid for 2D-compatible 3D images
        // Touch up the extent and the subres to make this look like a depth extent
        offset_.z = subres_range_.baseArrayLayer;
        subres_range_.baseArrayLayer = 0;
        extent_.depth = subres_range_.layerCount;
        subres_range_.layerCount = 1;
        return true;
    }
    return false;
}
ImageRangeGenerator::ImageRangeGenerator(const ImageRangeEncoder& encoder, const VkImageSubresourceRange& subres_range,
                                         VkDeviceSize base_address, bool is_depth_sliced)
    : encoder_(&encoder),
      subres_range_(GetRemaining(encoder.FullRange(), subres_range)),
      offset_(),
      extent_(),
      base_address_(base_address),
      is_depth_sliced_(is_depth_sliced) {
#ifndef NDEBUG
    assert(IsValid(*encoder_, subres_range_));
#endif
    if (SubresourceRangeIsEmpty(subres_range)) {
        // Not robust to empty ranges, so force the "at end" condition.
        pos_ = {0, 0};
        return;
    }

    SetUpSubresInfo();
    extent_ = subres_info_->extent;
    const bool converted = Convert2DCompatibleTo3D();
    SetUpIncrementerDefaults();
    if (converted && (extent_.depth != subres_info_->extent.depth)) {
        SetUpIncrementer(true, true, false);
    } else {
        SetUpSubresIncrementer();
    }
    SetInitialPos(subres_range_.baseArrayLayer, aspect_index_);
    pos_ = incr_state_.y_base;
}

ImageRangeGenerator::ImageRangeGenerator(const ImageRangeEncoder& encoder, const VkImageSubresourceRange& subres_range,
                                         const VkOffset3D& offset, const VkExtent3D& extent, VkDeviceSize base_address,
                                         bool is_depth_sliced)
    : encoder_(&encoder),
      subres_range_(GetRemaining(encoder.FullRange(), subres_range)),
      offset_(offset),
      extent_(extent),
      base_address_(base_address),
      is_depth_sliced_(is_depth_sliced) {
#ifndef NDEBUG
    assert(IsValid(*encoder_, subres_range_));
#endif

    assert(subres_range_.levelCount == 1);
    if (SubresourceRangeIsEmpty(subres_range)) {
        // Empty range forces empty position -- no operations other than deref for empty check are valid
        pos_ = {0, 0};
        return;
    }

    // When passing in an offset and extent, *must* only specify *one* mip level
    SetUpSubresInfo();
    Convert2DCompatibleTo3D();

    const VkExtent3D& subres_extent = subres_info_->extent;
    if (ExtentIsEmpty(extent_) || ((extent_.width + offset_.x) > subres_extent.width) ||
        ((extent_.height + offset_.y) > subres_extent.height) || ((extent_.depth + offset_.z) > subres_extent.depth)) {
        // Empty range forces empty position -- no operations other than deref for empty check are valid
        pos_ = {0, 0};
        return;
    }

    const bool all_width = (offset.x == 0) && (extent_.width == subres_extent.width);
    const bool all_height = (offset.y == 0) && (extent_.height == subres_extent.height);
    const bool all_depth = !encoder_->Is3D() || ((offset.z == 0) && (extent_.depth == subres_extent.depth));

    SetUpIncrementerDefaults();
    SetUpIncrementer(all_width, all_height, all_depth);
    SetInitialPos(subres_range_.baseArrayLayer, aspect_index_);
    pos_ = incr_state_.y_base;
}
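
// Usage sketch (illustrative; the names below are placeholders for whatever the call site supplies):
//     ImageRangeGenerator gen(image_encoder, region_range, region_offset, region_extent, image_base_address, is_depth_sliced);
//     for (; gen->non_empty(); ++gen) { /* each *gen is a contiguous byte range within the image's (idealized) layout */ }
// e.g. synchronization validation builds generators like this for the image regions it tracks.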

void ImageRangeGenerator::SetUpSubresInfo() {
    mip_index_ = 0;
    aspect_index_ = encoder_->LowerBoundFromMask(subres_range_.aspectMask);
    subres_index_ = encoder_->GetSubresourceIndex(aspect_index_, subres_range_.baseMipLevel);
    subres_info_ = &encoder_->GetSubresourceInfo(subres_index_);
}

void ImageRangeGenerator::SetUpIncrementerDefaults() {
    // These are safe defaults that most SetInitialPos* will use. Those that need to change them, do.
    incr_state_.y_step = encoder_->TexelExtent().height;
    incr_state_.layer_z_step = encoder_->Is3D() ? encoder_->TexelExtent().depth : 1U;
    incr_mip_ = 1;
    single_full_size_range_ = false;
}

// Assumes full extent in width/height/depth (if present)
void ImageRangeGenerator::SetUpSubresIncrementer() {
    const auto& full_range = encoder_->FullRange();
    const bool linear_image = encoder_->IsLinearImage();
    const bool is_3d = encoder_->Is3D();
    const bool layers_interleave = linear_image && (subres_info_->layout.arrayPitch > subres_info_->layout.size);
    if (layers_interleave) {
        // The implementation can interleave arrays, aspects, and mips arbitrarily
        if (encoder_->Is3D()) {
            set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosFullDepth;
        } else {
            set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosFullHeight;
        }
    } else if (is_3d || CoversAllLayers(full_range, subres_range_)) {
        if (!linear_image) {
            // Linear images are defined by the implementation and so we can't assume the ordering we use here
            bool all_mips = (subres_range_.baseMipLevel == 0) && (subres_range_.levelCount == full_range.levelCount);
            bool all_aspects = subres_range_.aspectMask == full_range.aspectMask;
            if (all_aspects && all_mips) {
                set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosAllSubres;
            } else {
                set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosOneAspect;
            }
        } else if (is_3d) {
            // 3D implies CoversAllLayers
            set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosFullDepth;
        } else {
            set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosAllLayers;
        }
    } else {
        set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosSomeLayers;
    }
}

void ImageRangeGenerator::SetUpIncrementer(bool all_width, bool all_height, bool all_depth) {
    if (!all_width || encoder_->IsInterleaveY()) {
        // Dimensional majority is not guaranteed for Linear images except in X
        // For tiled images we can use "idealized" addresses
        set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosFullOffset;
    } else if (!all_height) {
        set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosFullWidth;
    } else if (encoder_->Is3D() && !all_depth) {
        set_initial_pos_fn_ = &ImageRangeGenerator::SetInitialPosSomeDepth;
    } else {
        SetUpSubresIncrementer();
    }
}

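// Advance order: step through rows (y) within the current layer/depth slice, then across layers or depth slices, then
// across mip levels, and finally across aspects; each new subresource re-seeds the incrementer via SetInitialPos.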
ImageRangeGenerator& ImageRangeGenerator::operator++() {
    // Short circuit
    if (single_full_size_range_) {
        // Advance directly to end
        pos_ = {0, 0};
        return *this;
    }

    incr_state_.y_index += incr_state_.y_step;
    if (incr_state_.y_index < incr_state_.y_count) {
        incr_state_.y_base += incr_state_.incr_y;
        pos_ = incr_state_.y_base;
    } else {
        incr_state_.layer_z_index += incr_state_.layer_z_step;
        if (incr_state_.layer_z_index < incr_state_.layer_z_count) {
            incr_state_.layer_z_base += incr_state_.incr_layer_z;
            incr_state_.y_base = incr_state_.layer_z_base;
            pos_ = incr_state_.y_base;
        } else {
            // For aspects and mips we need to move to a new subresource layer info
            mip_index_ += incr_mip_;
            if (mip_index_ < subres_range_.levelCount) {
                // NOTE: This means that ImageRangeGenerator is relying on the major/minor ordering of mip and aspect in the
                // subres_info_ array
                subres_index_ += incr_mip_;
                extent_ = subres_info_->extent;  // Overwrites input extent, but > 1 MIP isn't valid with input extent
            } else {
                const auto next_aspect_index = encoder_->LowerBoundFromMask(subres_range_.aspectMask, aspect_index_ + 1);
                if (next_aspect_index < encoder_->Limits().aspect_index) {
                    // SubresourceLayout info in ImageRangeEncoder... it's a cheat, but it was a hotspot.
                    aspect_index_ = next_aspect_index;
                    mip_index_ = 0;
                    subres_index_ = encoder_->GetSubresourceIndex(aspect_index_, subres_range_.baseMipLevel);
                } else {
                    // At End
                    pos_ = {0, 0};
                    return *this;
                }
            }

            subres_info_ = &encoder_->GetSubresourceInfo(subres_index_);
            SetInitialPos(subres_range_.baseArrayLayer, aspect_index_);
            pos_ = incr_state_.y_base;
        }
    }

    return *this;
}

template <typename AspectTraits>
class AspectParametersImpl : public AspectParameters {
  public:
    VkImageAspectFlags AspectMask() const override { return AspectTraits::kAspectMask; }
    MaskIndexFunc MaskToIndexFunction() const override { return &AspectTraits::MaskIndex; }
    uint32_t AspectCount() const override { return AspectTraits::kAspectCount; };
    const VkImageAspectFlagBits* AspectBits() const override { return AspectTraits::AspectBits().data(); }
};

struct NullAspectTraits {
    static constexpr uint32_t kAspectCount = 0;
    static constexpr VkImageAspectFlags kAspectMask = 0;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{};
        return k_aspect_bits;
    }
};

struct ColorAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{{VK_IMAGE_ASPECT_COLOR_BIT}};
        return k_aspect_bits;
    }
};

struct DepthAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
        return k_aspect_bits;
    }
};

struct StencilAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
        return k_aspect_bits;
    }
};

struct DepthStencilAspectTraits {
    // VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,   >> 1 -> 1, - 1 -> 0
    // VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, >> 1 -> 2, - 1 -> 1
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 1) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{
            {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
        return k_aspect_bits;
    }
};

struct Multiplane2AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
        return k_aspect_bits;
    }
};

struct Multiplane3AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
    // VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, >> 4 - 1 -> 3 (clamped to 2 below)
    static constexpr uint32_t kAspectCount = 3;
    static constexpr VkImageAspectFlags kAspectMask =
        (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        index = index > 2 ? 2 : index;
        assert((index == 0) || (index == 1) || (index == 2));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> k_aspect_bits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
        return k_aspect_bits;
    }
};

// Create the encoder parameter suitable to the full range aspect mask (*must* be canonical)
const AspectParameters* AspectParameters::Get(VkImageAspectFlags aspect_mask) {
    // We need a persistent instance of each specialist containing only a VTABLE each
    static const AspectParametersImpl<ColorAspectTraits> k_color_param;
    static const AspectParametersImpl<DepthAspectTraits> k_depth_param;
    static const AspectParametersImpl<StencilAspectTraits> k_stencil_param;
    static const AspectParametersImpl<DepthStencilAspectTraits> k_depth_stencil_param;
    static const AspectParametersImpl<Multiplane2AspectTraits> k_mutliplane2_param;
    static const AspectParametersImpl<Multiplane3AspectTraits> k_mutliplane3_param;
    static const AspectParametersImpl<NullAspectTraits> k_null_aspect;

    const AspectParameters* param;
    switch (aspect_mask) {
        case ColorAspectTraits::kAspectMask:
            param = &k_color_param;
            break;
        case DepthAspectTraits::kAspectMask:
            param = &k_depth_param;
            break;
        case StencilAspectTraits::kAspectMask:
            param = &k_stencil_param;
            break;
        case DepthStencilAspectTraits::kAspectMask:
            param = &k_depth_stencil_param;
            break;
        case Multiplane2AspectTraits::kAspectMask:
            param = &k_mutliplane2_param;
            break;
        case Multiplane3AspectTraits::kAspectMask:
            param = &k_mutliplane3_param;
            break;
        default:
            assert(false);
            param = &k_null_aspect;
    }
    return param;
}
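
// For example, Get(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT) returns the DepthStencilAspectTraits
// specialization; a non-canonical mask asserts and falls back to the null traits.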

inline ImageRangeEncoder::SubresInfo::SubresInfo(const VkSubresourceLayout& layout_, const VkExtent3D& extent_,
                                                 const VkExtent3D& texel_extent, double texel_size)
    : layout(layout_),
      extent(extent_),
      y_step_pitch(layout.rowPitch * texel_extent.height),
      z_step_pitch(layout.depthPitch * texel_extent.depth),
      layer_span(layout.rowPitch * extent_.height) {}
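
// y_step_pitch and z_step_pitch are the byte strides for one texel-block step in y and z (rowPitch and depthPitch scaled
// by the compressed-block extent); they are what the SetInitialPosFullOffset/FullWidth paths advance by per increment.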

void ImageRangeGenerator::IncrementerState::Set(uint32_t y_count_, uint32_t layer_z_count_, IndexType base, IndexType span,
                                                IndexType y_step, IndexType z_step) {
    y_count = y_count_;
    layer_z_count = layer_z_count_;
    y_index = 0;
    layer_z_index = 0;
    y_base.begin = base;
    y_base.end = base + span;
    layer_z_base = y_base;
    incr_y = y_step;
    incr_layer_z = z_step;
}

};  // namespace subresource_adapter