blob: 4bdd07cdb0e7c270e73f01a0312a2dcc6e2c9c72 [file] [log] [blame]
/* Copyright (c) 2019-2020 The Khronos Group Inc.
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 * Copyright (C) 2019-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * John Zulauf <jzulauf@lunarg.com>
 *
 */
21#include <cassert>
22#include "subresource_adapter.h"
locke-lunarg296a3c92020-03-25 01:04:29 -060023#include "vk_format_utils.h"
24#include "state_tracker.h"
25#include "core_validation_types.h"
locke-lunargf9293012020-04-16 17:01:23 -060026#include <cmath>
John Zulauf11211402019-11-15 14:02:36 -070027
28namespace subresource_adapter {
John Zulauf2ea823e2019-11-19 08:54:59 -070029Subresource::Subresource(const RangeEncoder& encoder, const VkImageSubresource& subres)
30 : VkImageSubresource({0, subres.mipLevel, subres.arrayLayer}), aspect_index() {
31 aspect_index = encoder.LowerBoundFromMask(subres.aspectMask);
32 aspectMask = encoder.AspectBit(aspect_index);
33}
34
// Specialized encoders for single-aspect images: with exactly one aspect the
// aspect-base term vanishes, and degenerate mip/layer dimensions drop out too.
IndexType RangeEncoder::Encode1AspectArrayOnly(const Subresource& pos) const { return pos.arrayLayer; }
IndexType RangeEncoder::Encode1AspectMipArray(const Subresource& pos) const { return pos.arrayLayer + pos.mipLevel * mip_size_; }
IndexType RangeEncoder::Encode1AspectMipOnly(const Subresource& pos) const { return pos.mipLevel; }
John Zulauf11211402019-11-15 14:02:36 -070038
// General multi-aspect encoders: aspect_base_[] holds the starting flat index of
// each aspect's contiguous span (built in PopulateFunctionPointers), so the
// encoded index is layer + mip*mip_size_ + aspect base, with degenerate terms
// omitted by the specialized variants below.
IndexType RangeEncoder::EncodeAspectArrayOnly(const Subresource& pos) const {
    return pos.arrayLayer + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipArray(const Subresource& pos) const {
    return pos.arrayLayer + pos.mipLevel * mip_size_ + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipOnly(const Subresource& pos) const { return pos.mipLevel + aspect_base_[pos.aspect_index]; }
John Zulauf11211402019-11-15 14:02:36 -070046
// Single-aspect specialization: the only valid aspect index is 0, so just
// assert the caller's mask actually contains that aspect.
uint32_t RangeEncoder::LowerBoundImpl1(VkImageAspectFlags aspect_mask) const {
    assert(aspect_mask & aspect_bits_[0]);
    return 0;
}
51uint32_t RangeEncoder::LowerBoundWithStartImpl1(VkImageAspectFlags aspect_mask, uint32_t start) const {
52 assert(start == 0);
53 if (aspect_mask & aspect_bits_[0]) {
54 return 0;
55 }
56 return limits_.aspect_index;
John Zulauf11211402019-11-15 14:02:36 -070057}
58
John Zulauf2ea823e2019-11-19 08:54:59 -070059uint32_t RangeEncoder::LowerBoundImpl2(VkImageAspectFlags aspect_mask) const {
60 if (aspect_mask & aspect_bits_[0]) {
61 return 0;
62 }
63 assert(aspect_mask & aspect_bits_[1]);
64 return 1;
John Zulauf11211402019-11-15 14:02:36 -070065}
John Zulauf2ea823e2019-11-19 08:54:59 -070066uint32_t RangeEncoder::LowerBoundWithStartImpl2(VkImageAspectFlags aspect_mask, uint32_t start) const {
John Zulauf5823c622019-11-25 13:33:44 -070067 switch (start) {
68 case 0:
69 if (aspect_mask & aspect_bits_[0]) {
70 return 0;
71 }
72 // no break
73 case 1:
74 if (aspect_mask & aspect_bits_[1]) {
75 return 1;
76 }
77 break;
78 default:
79 break;
John Zulauf2ea823e2019-11-19 08:54:59 -070080 }
81 return limits_.aspect_index;
82}
83
84uint32_t RangeEncoder::LowerBoundImpl3(VkImageAspectFlags aspect_mask) const {
85 if (aspect_mask & aspect_bits_[0]) {
86 return 0;
87 } else if (aspect_mask & aspect_bits_[1]) {
88 return 1;
89 } else {
90 assert(aspect_mask & aspect_bits_[2]);
91 return 2;
92 }
93}
94
95uint32_t RangeEncoder::LowerBoundWithStartImpl3(VkImageAspectFlags aspect_mask, uint32_t start) const {
John Zulauf5823c622019-11-25 13:33:44 -070096 switch (start) {
97 case 0:
98 if (aspect_mask & aspect_bits_[0]) {
99 return 0;
100 }
101 // no break
102 case 1:
103 if ((aspect_mask & aspect_bits_[1])) {
104 return 1;
105 }
106 // no break
107 case 2:
108 if ((aspect_mask & aspect_bits_[2])) {
109 return 2;
110 }
111 break;
112 default:
113 break;
John Zulauf2ea823e2019-11-19 08:54:59 -0700114 }
115 return limits_.aspect_index;
116}
117
// Bind the encode/decode/lower-bound member function pointers to the
// specialization matching this encoder's aspect count and any degenerate
// (single-mip or single-layer) dimensions, then build the per-aspect flat-index
// base table used by the multi-aspect encoders.
void RangeEncoder::PopulateFunctionPointers() {
    // Select the encode/decode specialists
    if (limits_.aspect_index == 1) {
        // One aspect use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::Encode1AspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<1>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::Encode1AspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<1>;
        } else {
            encode_function_ = &RangeEncoder::Encode1AspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<1>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl1;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl1;
    } else if (limits_.aspect_index == 2) {
        // Two aspect use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::EncodeAspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<2>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::EncodeAspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<2>;
        } else {
            encode_function_ = &RangeEncoder::EncodeAspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<2>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl2;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl2;
    } else {
        // Three aspects: no degenerate-dimension specializations are provided.
        encode_function_ = &RangeEncoder::EncodeAspectMipArray;
        decode_function_ = &RangeEncoder::DecodeAspectMipArray<3>;
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl3;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl3;
    }

    // Initialize the offset array: aspect i begins where aspect i-1's span ends.
    aspect_base_[0] = 0;
    for (uint32_t i = 1; i < limits_.aspect_index; ++i) {
        aspect_base_[i] = aspect_base_[i - 1] + aspect_size_;
    }
}
John Zulauf11211402019-11-15 14:02:36 -0700161
// Construct an encoder over a *whole* image's subresource space.
// mip_size_ is the number of indices per mip level (the layer count), and
// aspect_size_ the number per aspect (mip_size_ * level count); both feed the
// flat-index math in the Encode*/Decode* specializations.
RangeEncoder::RangeEncoder(const VkImageSubresourceRange& full_range, const AspectParameters* param)
    : limits_(param->AspectMask(), full_range.levelCount, full_range.layerCount, param->AspectCount()),
      full_range_(full_range),
      mip_size_(full_range.layerCount),
      aspect_size_(mip_size_ * full_range.levelCount),
      aspect_bits_(param->AspectBits()),
      mask_index_function_(param->MaskToIndexFunction()),
      encode_function_(nullptr),
      decode_function_(nullptr) {
    // Only valid to create an encoder for a *whole* image (i.e. base must be zero, and the specified limits_.selected_aspects
    // *must* be equal to the traits aspect mask. (Encoder range assumes zero bases)
    assert(full_range.aspectMask == limits_.aspectMask);
    assert(full_range.baseArrayLayer == 0);
    assert(full_range.baseMipLevel == 0);
    // TODO: should be some static assert
    assert(param->AspectCount() <= kMaxSupportedAspect);
    PopulateFunctionPointers();
}
180
locke-lunarg5faaff52020-02-27 14:31:11 -0700181static bool IsValid(const RangeEncoder& encoder, const VkImageSubresourceRange& bounds) {
182 const auto& limits = encoder.Limits();
183 return (((bounds.aspectMask & limits.aspectMask) == bounds.aspectMask) &&
184 (bounds.baseMipLevel + bounds.levelCount <= limits.mipLevel) &&
185 (bounds.baseArrayLayer + bounds.layerCount <= limits.arrayLayer));
186}
187
// Create an iterator like "generator" that for each increment produces the next index range matching the
// next contiguous (in index space) section of the VkImageSubresourceRange
// Ranges will always span the layerCount layers, and if the layerCount is the full range of the image (as known by
// the encoder) will span the levelCount mip levels as well.
RangeGenerator::RangeGenerator(const RangeEncoder& encoder, const VkImageSubresourceRange& subres_range)
    : encoder_(&encoder), isr_pos_(encoder, subres_range), pos_(), aspect_base_() {
    // Same containment conditions as IsValid(), spelled out so the assert message shows each term.
    assert((((isr_pos_.Limits()).aspectMask & (encoder.Limits()).aspectMask) == (isr_pos_.Limits()).aspectMask) &&
           ((isr_pos_.Limits()).baseMipLevel + (isr_pos_.Limits()).levelCount <= (encoder.Limits()).mipLevel) &&
           ((isr_pos_.Limits()).baseArrayLayer + (isr_pos_.Limits()).layerCount <= (encoder.Limits()).arrayLayer));

    // To see if we have a full range special case, need to compare the subres_range against the *encoders* limits
    const auto& limits = encoder.Limits();
    if ((subres_range.baseArrayLayer == 0 && subres_range.layerCount == limits.arrayLayer)) {
        if ((subres_range.baseMipLevel == 0) && (subres_range.levelCount == limits.mipLevel)) {
            if (subres_range.aspectMask == limits.aspectMask) {
                // Full range
                pos_.begin = 0;
                pos_.end = encoder.AspectSize() * limits.aspect_index;
                aspect_count_ = 1;  // Flag this to never advance aspects.
            } else {
                // All mips all layers but not all aspect
                pos_.begin = encoder.AspectBase(isr_pos_.aspect_index);
                pos_.end = pos_.begin + encoder.AspectSize();
                aspect_count_ = limits.aspect_index;
            }
        } else {
            // All array layers, but not all levels
            pos_.begin = encoder.AspectBase(isr_pos_.aspect_index) + subres_range.baseMipLevel * encoder.MipSize();
            pos_.end = pos_.begin + subres_range.levelCount * encoder.MipSize();
            aspect_count_ = limits.aspect_index;
        }

        // Full set of array layers at a time, thus we can span across all selected mip levels
        mip_count_ = 1;  // we don't ever advance across mips, as we do all of them in one range
    } else {
        // Each range covers all included array_layers for each selected mip_level for each given selected aspect
        // so we'll use the general purpose encode and smallest range size
        pos_.begin = encoder.Encode(isr_pos_);
        pos_.end = pos_.begin + subres_range.layerCount;

        // we do have to traverse across mips, though (other than Encode above), we don't have to know which one we are on.
        mip_count_ = subres_range.levelCount;
        aspect_count_ = limits.aspect_index;
    }

    // To get to the next aspect range we offset from the last base
    aspect_base_ = pos_;
    mip_index_ = 0;
    aspect_index_ = isr_pos_.aspect_index;
}
238
// Advance to the next contiguous index range: first across mips within the
// current aspect, then to the next selected aspect; when both are exhausted,
// tombstone to the "at end" convention (pos_ == {0,0}, aspectMask == 0).
RangeGenerator& RangeGenerator::operator++() {
    mip_index_++;
    // NOTE: If all selected mip levels are done at once, mip_count_ is set to one, not the number of selected mip_levels
    if (mip_index_ >= mip_count_) {
        const auto last_aspect_index = aspect_index_;
        // Seek the next value aspect (if any)
        aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask, aspect_index_ + 1);
        if (aspect_index_ < aspect_count_) {
            // Force isr_pos to the beginning of this found aspect
            isr_pos_.SeekAspect(aspect_index_);
            // SubresourceGenerator should never be at tombstones when we aren't
            assert(isr_pos_.aspectMask != 0);

            // Offset by the distance between the last start of aspect and *this* start of aspect
            aspect_base_ += (encoder_->AspectBase(isr_pos_.aspect_index) - encoder_->AspectBase(last_aspect_index));
            pos_ = aspect_base_;
            mip_index_ = 0;
        } else {
            // Tombstone both index range and subresource positions to "At end" convention
            pos_ = {0, 0};
            isr_pos_.aspectMask = 0;
        }
    } else {
        // Note: for the layerCount < full_range.layerCount case, because the generated ranges per mip_level are discontinuous
        // we have to do each individual array of ranges
        pos_ += encoder_->MipSize();
        isr_pos_.SeekMip(isr_pos_.Limits().baseMipLevel + mip_index_);
    }
    return *this;
}
269
// Convenience overload: derive the aspect parameters from the image's own full-range aspect mask.
ImageRangeEncoder::ImageRangeEncoder(const IMAGE_STATE& image)
    : ImageRangeEncoder(image, AspectParameters::Get(image.full_range.aspectMask)) {}
locke-lunarg5faaff52020-02-27 14:31:11 -0700272
// Build per-(mip, aspect) layout and extent tables for byte-granular encoding.
// For linear images the driver-reported VkSubresourceLayout is queried directly;
// for optimal-tiling images a synthetic, tightly-packed layout is computed, with
// layout.offset accumulated across iterations (offset += previous size) so each
// subresource is placed immediately after the one before it.
ImageRangeEncoder::ImageRangeEncoder(const IMAGE_STATE& image, const AspectParameters* param)
    : RangeEncoder(image.full_range, param), image_(&image) {
    VkSubresourceLayout layout = {};
    VkImageSubresource subres = {};
    VkExtent2D divisors = {};
    VkImageSubresourceLayers subres_layers = {limits_.aspectMask, 0, 0, limits_.arrayLayer};
    linear_image = false;

    // WORKAROUND for dev_sim and mock_icd not containing valid VkSubresourceLayout yet. Treat it as optimal image.
    if (image_->createInfo.tiling != VK_IMAGE_TILING_OPTIMAL) {
        // Probe aspect 0 of mip 0; a non-zero reported size means the driver returns real layouts.
        subres = {static_cast<VkImageAspectFlags>(AspectBit(0)), 0, 0};
        DispatchGetImageSubresourceLayout(image_->store_device_as_workaround, image_->image, &subres, &layout);
        if (layout.size > 0) {
            linear_image = true;
        }
    }

    for (uint32_t mip_index = 0; mip_index < limits_.mipLevel; ++mip_index) {
        subres_layers.mipLevel = mip_index;
        auto subres_extent = GetImageSubresourceExtent(image_, &subres_layers);
        subres_extents_.push_back(subres_extent);
        subres.mipLevel = mip_index;
        for (uint32_t aspect_index = 0; aspect_index < limits_.aspect_index; ++aspect_index) {
            subres.aspectMask = static_cast<VkImageAspectFlags>(AspectBit(aspect_index));
            // Texel sizes are per-aspect only, so record them once (on mip 0).
            if (mip_index == 0) {
                texel_sizes_.push_back(FormatTexelSize(image.createInfo.format, subres.aspectMask));
            }
            if (linear_image) {
                DispatchGetImageSubresourceLayout(image_->store_device_as_workaround, image_->image, &subres, &layout);
                subres_layouts_.push_back(layout);
            } else {
                // Synthesize a packed layout; multi-plane aspects scale the extent by the plane divisors.
                divisors = FindMultiplaneExtentDivisors(image.createInfo.format, subres.aspectMask);
                layout.offset += layout.size;  // place right after the previous subresource
                layout.rowPitch =
                    static_cast<VkDeviceSize>(ceil(subres_extent.width * texel_sizes_[aspect_index] / divisors.width));
                layout.arrayPitch = layout.rowPitch * subres_extent.height / divisors.height;
                layout.depthPitch = layout.arrayPitch;  // NOTE(review): depth slices assumed layer-pitch sized — confirm for 3D images
                layout.size = layout.arrayPitch * limits_.arrayLayer;
                subres_layouts_.push_back(layout);
            }
        }
    }
}
316
// Flatten (subresource, layer, texel offset) to a byte index using the cached
// per-(mip, aspect) layout. The x term is scaled by the (possibly fractional)
// per-aspect texel size, so the sum is computed in floating point and rounded
// up to a whole byte index.
IndexType ImageRangeEncoder::Encode(const VkImageSubresource& subres, uint32_t layer, VkOffset3D offset) const {
    const auto& subres_layout = SubresourceLayout(subres);
    return static_cast<IndexType>(ceil(layer * subres_layout.arrayPitch + offset.z * subres_layout.depthPitch +
                                       offset.y * subres_layout.rowPitch +
                                       offset.x * texel_sizes_[LowerBoundFromMask(subres.aspectMask)] + subres_layout.offset));
}
locke-lunarg5faaff52020-02-27 14:31:11 -0700323
// Inverse of Encode(): peel the byte index back into layer, then z, y, and x by
// successive division/remainder against the subresource's pitches. Exact only
// for indices produced by Encode() for the same subresource.
void ImageRangeEncoder::Decode(const VkImageSubresource& subres, const IndexType& encode, uint32_t& out_layer,
                               VkOffset3D& out_offset) const {
    const auto& subres_layout = SubresourceLayout(subres);
    IndexType decode = encode - subres_layout.offset;
    out_layer = static_cast<uint32_t>(decode / subres_layout.arrayPitch);
    decode -= (out_layer * subres_layout.arrayPitch);
    out_offset.z = static_cast<int32_t>(decode / subres_layout.depthPitch);
    decode -= (out_offset.z * subres_layout.depthPitch);
    out_offset.y = static_cast<int32_t>(decode / subres_layout.rowPitch);
    decode -= (out_offset.y * subres_layout.rowPitch);
    out_offset.x = static_cast<int32_t>(decode / texel_sizes_[LowerBoundFromMask(subres.aspectMask)]);
}
336
locke-lunarg5f7d3c62020-04-07 00:10:39 -0600337const VkSubresourceLayout& ImageRangeEncoder::SubresourceLayout(const VkImageSubresource& subres) const {
338 uint32_t subres_layouts_index = subres.mipLevel * limits_.aspect_index + LowerBoundFromMask(subres.aspectMask);
339 return subres_layouts_[subres_layouts_index];
locke-lunarg296a3c92020-03-25 01:04:29 -0600340}
341
// Generator of byte-index ranges covering (subres_range, offset, extent) of an
// image. For 2D usage (z span of exactly 1) the "layer" dimension iterates the
// subresource range's array layers; otherwise it iterates depth slices from the
// 3D offset/extent instead.
ImageRangeGenerator::ImageRangeGenerator(const ImageRangeEncoder& encoder, const VkImageSubresourceRange& subres_range,
                                         const VkOffset3D& offset, const VkExtent3D& extent)
    : encoder_(&encoder), subres_range_(subres_range), offset_(offset), extent_(extent) {
    assert(IsValid(*encoder_, subres_range));
    mip_level_index_ = 0;
    aspect_index_ = encoder_->LowerBoundFromMask(subres_range.aspectMask);
    if ((offset_.z + extent_.depth) == 1) {
        // 2D case: layers come from the subresource range.
        range_arraylayer_base_ = subres_range.baseArrayLayer;
        range_layer_count_ = subres_range_.layerCount;
    } else {
        // 3D case: treat depth slices as the layer dimension.
        range_arraylayer_base_ = offset_.z;
        range_layer_count_ = extent_.depth;
    }
    SetPos();
}
357
// Recompute pos_ (the current byte range) and the iteration counters for the
// current (aspect_index_, mip_level_index_) position. The nested conditionals
// detect successively larger contiguous spans — full rows, full layers, full
// mips, full aspects — and collapse the corresponding counter to 1 so
// operator++ never sub-iterates a dimension that is covered in one range.
void ImageRangeGenerator::SetPos() {
    VkImageSubresource subres = {static_cast<VkImageAspectFlags>(encoder_->AspectBit(aspect_index_)),
                                 subres_range_.baseMipLevel + mip_level_index_,
                                 subres_range_.baseArrayLayer};
    subres_layout_ = &(encoder_->SubresourceLayout(subres));
    const VkExtent3D& subres_extent = encoder_->SubresourceExtent(subres.mipLevel);
    Subresource limits = encoder_->Limits();

    // Default (fully general) counts: one range per row, per layer, per mip.
    offset_y_count_ = static_cast<int32_t>(extent_.height);
    layer_count_ = range_layer_count_;
    mip_count_ = subres_range_.levelCount;
    aspect_count_ = limits.aspect_index;
    pos_.begin = encoder_->Encode(subres, subres_range_.baseArrayLayer, offset_);
    pos_.end = pos_.begin;

    if (offset_.x == 0 && extent_.width == subres_extent.width) {
        // Full rows: at least one whole row per range.
        if (offset_.y == 0 && extent_.height == subres_extent.height) {
            // Full 2D slice: no per-row iteration needed.
            offset_y_count_ = 1;
            if (range_arraylayer_base_ == 0 && range_layer_count_ == limits.arrayLayer) {
                // All layers: no per-layer iteration needed.
                layer_count_ = 1;
                if (subres_range_.baseMipLevel == 0 && subres_range_.levelCount == limits.mipLevel) {
                    // All mips too: one range per selected aspect run — sum the sizes
                    // of every mip of every selected aspect into a single span.
                    mip_count_ = 1;
                    for (uint32_t aspect_index = aspect_index_; aspect_index < aspect_count_;) {
                        subres.aspectMask = static_cast<VkImageAspectFlags>(encoder_->AspectBit(aspect_index));
                        for (uint32_t mip_index = 0; mip_index < limits.mipLevel; ++mip_index) {
                            subres.mipLevel = mip_index;
                            const VkSubresourceLayout& subres_layout = encoder_->SubresourceLayout(subres);
                            pos_.end += subres_layout.size;
                        }
                        aspect_index = encoder_->LowerBoundFromMask(subres_range_.aspectMask, aspect_index + 1);
                    }
                    aspect_count_ = 1;
                } else {
                    // Subset of mips: span the selected mips of this aspect.
                    // NOTE(review): loop starts at mip_level_index_ but runs to levelCount — verify intent for mid-iteration calls.
                    for (uint32_t mip_index = mip_level_index_; mip_index < subres_range_.levelCount; ++mip_index) {
                        const VkSubresourceLayout& subres_layout = encoder_->SubresourceLayout(subres);
                        pos_.end += subres_layout.size;
                        subres.mipLevel++;
                    }
                }
            } else {
                // Subset of layers: span them via the array pitch.
                pos_.end += subres_layout_->arrayPitch * range_layer_count_;
            }
        } else {
            // Full rows but partial height: span the selected rows.
            pos_.end += (subres_layout_->rowPitch * offset_y_count_);
        }
    } else {
        // Partial rows: the smallest range — one row's worth of selected texels.
        pos_.end += static_cast<IndexType>(ceil(encoder_->TexelSize(aspect_index_) * extent_.width));
    }
    // Reset the per-dimension cursors and the bases operator++ advances from.
    offset_layer_base_ = pos_;
    offset_offset_y_base_ = pos_;
    arrayLayer_index_ = 0;
    offset_y_index_ = 0;
}
411
// Odometer-style advance: rows within a layer, layers within a mip, mips within
// an aspect, then the next selected aspect. SetPos() rebuilds the range when a
// mip or aspect boundary is crossed; exhaustion tombstones pos_ to {0, 0}.
ImageRangeGenerator* ImageRangeGenerator::operator++() {
    offset_y_index_++;

    if (offset_y_index_ < offset_y_count_) {
        // Next row within the current layer.
        offset_offset_y_base_ += subres_layout_->rowPitch;
        pos_ = offset_offset_y_base_;
    } else {
        offset_y_index_ = 0;
        arrayLayer_index_++;
        if (arrayLayer_index_ < layer_count_) {
            // Next layer within the current mip.
            offset_layer_base_ += subres_layout_->arrayPitch;
            offset_offset_y_base_ = offset_layer_base_;
            pos_ = offset_layer_base_;
        } else {
            arrayLayer_index_ = 0;
            mip_level_index_++;
            if (mip_level_index_ < subres_range_.levelCount) {
                SetPos();
            } else {
                mip_level_index_ = 0;
                // Seek the next selected aspect, if any remain.
                aspect_index_ = encoder_->LowerBoundFromMask(subres_range_.aspectMask, aspect_index_ + 1);
                if (aspect_index_ < aspect_count_) {
                    SetPos();
                } else {
                    // End
                    pos_ = {0, 0};
                }
            }
        }
    }
    return this;
}
444
// Adapts a compile-time AspectTraits policy struct to the runtime
// AspectParameters interface; each instantiation is stateless (vtable only).
template <typename AspectTraits>
class AspectParametersImpl : public AspectParameters {
  public:
    VkImageAspectFlags AspectMask() const override { return AspectTraits::kAspectMask; }
    MaskIndexFunc MaskToIndexFunction() const override { return &AspectTraits::MaskIndex; }
    uint32_t AspectCount() const override { return AspectTraits::kAspectCount; };
    const VkImageAspectFlagBits* AspectBits() const override { return AspectTraits::AspectBits().data(); }
};
453
// Fallback traits for unrecognized/non-canonical aspect masks: zero aspects.
struct NullAspectTraits {
    static constexpr uint32_t kAspectCount = 0;
    static constexpr VkImageAspectFlags kAspectMask = 0;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{};
        return kAspectBits;
    }
};
463
// Traits for plain color images: single COLOR aspect at index 0.
struct ColorAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_COLOR_BIT}};
        return kAspectBits;
    }
};
473
// Traits for depth-only images: single DEPTH aspect at index 0.
struct DepthAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
        return kAspectBits;
    }
};
483
// Traits for stencil-only images: single STENCIL aspect at index 0.
struct StencilAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
        return kAspectBits;
    }
};
493
// Traits for combined depth/stencil images. MaskIndex maps the single-bit
// mask to its aspect index arithmetically:
// VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, >> 1 -> 1, -1 -> 0
// VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, >> 1 -> 2, -1 -> 1
struct DepthStencilAspectTraits {
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 1) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
        return kAspectBits;
    }
};
510
// Traits for two-plane multi-planar formats. MaskIndex arithmetic:
// VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4, - 1 -> 0
// VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4, - 1 -> 1
struct Multiplane2AspectTraits {
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
        return kAspectBits;
    }
};
527
// Traits for three-plane multi-planar formats. MaskIndex arithmetic:
// VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4, - 1 -> 0
// VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4, - 1 -> 1
// VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, >> 4, - 1 -> 3 (clamped to 2 below)
struct Multiplane3AspectTraits {
    static constexpr uint32_t kAspectCount = 3;
    static constexpr VkImageAspectFlags kAspectMask =
        (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        // Plane 2's raw value is 3; clamp so all three planes map to 0..2.
        index = index > 2 ? 2 : index;
        assert((index == 0) || (index == 1) || (index == 2));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
        return kAspectBits;
    }
};
547
548// Create the encoder parameter suitable to the full range aspect mask (*must* be canonical)
549const AspectParameters* AspectParameters::Get(VkImageAspectFlags aspect_mask) {
550 // We need a persitent instance of each specialist containing only a VTABLE each
551 static const AspectParametersImpl<ColorAspectTraits> kColorParam;
552 static const AspectParametersImpl<DepthAspectTraits> kDepthParam;
553 static const AspectParametersImpl<StencilAspectTraits> kStencilParam;
554 static const AspectParametersImpl<DepthStencilAspectTraits> kDepthStencilParam;
555 static const AspectParametersImpl<Multiplane2AspectTraits> kMutliplane2Param;
556 static const AspectParametersImpl<Multiplane3AspectTraits> kMutliplane3Param;
557 static const AspectParametersImpl<NullAspectTraits> kNullAspect;
558
559 const AspectParameters* param;
560 switch (aspect_mask) {
561 case ColorAspectTraits::kAspectMask:
562 param = &kColorParam;
563 break;
564 case DepthAspectTraits::kAspectMask:
565 param = &kDepthParam;
566 break;
567 case StencilAspectTraits::kAspectMask:
568 param = &kStencilParam;
569 break;
570 case DepthStencilAspectTraits::kAspectMask:
571 param = &kDepthStencilParam;
572 break;
573 case Multiplane2AspectTraits::kAspectMask:
574 param = &kMutliplane2Param;
575 break;
576 case Multiplane3AspectTraits::kAspectMask:
577 param = &kMutliplane3Param;
578 break;
579 default:
580 assert(false);
581 param = &kNullAspect;
582 }
583 return param;
584}
585
586}; // namespace subresource_adapter