/* Copyright (c) 2019-2020 The Khronos Group Inc.
 * Copyright (c) 2019-2020 Valve Corporation
 * Copyright (c) 2019-2020 LunarG, Inc.
 * Copyright (C) 2019-2020 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * John Zulauf <jzulauf@lunarg.com>
 *
 */
#include <cassert>
#include "subresource_adapter.h"

namespace subresource_adapter {
Subresource::Subresource(const RangeEncoder& encoder, const VkImageSubresource& subres)
    : VkImageSubresource({0, subres.mipLevel, subres.arrayLayer}), aspect_index() {
    aspect_index = encoder.LowerBoundFromMask(subres.aspectMask);
    aspectMask = encoder.AspectBit(aspect_index);
}

IndexType RangeEncoder::Encode1AspectArrayOnly(const Subresource& pos) const { return pos.arrayLayer; }
IndexType RangeEncoder::Encode1AspectMipArray(const Subresource& pos) const { return pos.arrayLayer + pos.mipLevel * mip_size_; }
IndexType RangeEncoder::Encode1AspectMipOnly(const Subresource& pos) const { return pos.mipLevel; }

IndexType RangeEncoder::EncodeAspectArrayOnly(const Subresource& pos) const {
    return pos.arrayLayer + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipArray(const Subresource& pos) const {
    return pos.arrayLayer + pos.mipLevel * mip_size_ + aspect_base_[pos.aspect_index];
}
IndexType RangeEncoder::EncodeAspectMipOnly(const Subresource& pos) const { return pos.mipLevel + aspect_base_[pos.aspect_index]; }

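// Illustrative example (not from the original file): for a combined depth/stencil image with
// levelCount = 4 and layerCount = 6, mip_size_ = 6 and aspect_size_ = 4 * 6 = 24, so
// aspect_base_ = {0, 24} and EncodeAspectMipArray for (aspect = stencil, mip = 2, layer = 3)
// yields 3 + 2 * 6 + 24 = 39.
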
uint32_t RangeEncoder::LowerBoundImpl1(VkImageAspectFlags aspect_mask) const {
    assert(aspect_mask & aspect_bits_[0]);
    return 0;
}
uint32_t RangeEncoder::LowerBoundWithStartImpl1(VkImageAspectFlags aspect_mask, uint32_t start) const {
    assert(start == 0);
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    }
    return limits_.aspect_index;
}

uint32_t RangeEncoder::LowerBoundImpl2(VkImageAspectFlags aspect_mask) const {
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    }
    assert(aspect_mask & aspect_bits_[1]);
    return 1;
}
uint32_t RangeEncoder::LowerBoundWithStartImpl2(VkImageAspectFlags aspect_mask, uint32_t start) const {
    switch (start) {
        case 0:
            if (aspect_mask & aspect_bits_[0]) {
                return 0;
            }
            // no break
        case 1:
            if (aspect_mask & aspect_bits_[1]) {
                return 1;
            }
            break;
        default:
            break;
    }
    return limits_.aspect_index;
}

uint32_t RangeEncoder::LowerBoundImpl3(VkImageAspectFlags aspect_mask) const {
    if (aspect_mask & aspect_bits_[0]) {
        return 0;
    } else if (aspect_mask & aspect_bits_[1]) {
        return 1;
    } else {
        assert(aspect_mask & aspect_bits_[2]);
        return 2;
    }
}

uint32_t RangeEncoder::LowerBoundWithStartImpl3(VkImageAspectFlags aspect_mask, uint32_t start) const {
    switch (start) {
        case 0:
            if (aspect_mask & aspect_bits_[0]) {
                return 0;
            }
            // no break
        case 1:
            if (aspect_mask & aspect_bits_[1]) {
                return 1;
            }
            // no break
        case 2:
            if (aspect_mask & aspect_bits_[2]) {
                return 2;
            }
            break;
        default:
            break;
    }
    return limits_.aspect_index;
}

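// Illustrative note (not from the original file): with aspect_bits_ = {DEPTH, STENCIL},
// LowerBoundImpl2(VK_IMAGE_ASPECT_STENCIL_BIT) returns 1, while
// LowerBoundWithStartImpl2(VK_IMAGE_ASPECT_DEPTH_BIT, 1) finds no matching aspect at or after
// the start position and returns limits_.aspect_index as the "not found / at end" sentinel.
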
void RangeEncoder::PopulateFunctionPointers() {
    // Select the encode/decode specialists
    if (limits_.aspect_index == 1) {
        // A single aspect uses the simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::Encode1AspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<1>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::Encode1AspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<1>;
        } else {
            encode_function_ = &RangeEncoder::Encode1AspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<1>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl1;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl1;
    } else if (limits_.aspect_index == 2) {
        // Two aspects use the simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::EncodeAspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<2>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::EncodeAspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<2>;
        } else {
            encode_function_ = &RangeEncoder::EncodeAspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<2>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl2;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl2;
    } else {
        encode_function_ = &RangeEncoder::EncodeAspectMipArray;
        decode_function_ = &RangeEncoder::DecodeAspectMipArray<3>;
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl3;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl3;
    }

    // Initialize the aspect offset array
    aspect_base_[0] = 0;
    for (uint32_t i = 1; i < limits_.aspect_index; ++i) {
        aspect_base_[i] = aspect_base_[i - 1] + aspect_size_;
    }
}

RangeEncoder::RangeEncoder(const VkImageSubresourceRange& full_range, const AspectParameters* param)
    : full_range_(full_range),
      limits_(param->AspectMask(), full_range.levelCount, full_range.layerCount, param->AspectCount()),
      mip_size_(full_range.layerCount),
      aspect_size_(mip_size_ * full_range.levelCount),
      aspect_bits_(param->AspectBits()),
      mask_index_function_(param->MaskToIndexFunction()),
      encode_function_(nullptr),
      decode_function_(nullptr) {
    // Only valid to create an encoder for a *whole* image: the base mip level and array layer must be zero, and the
    // specified aspect mask *must* equal the traits aspect mask. (The encoder's ranges assume zero bases.)
    assert(full_range.aspectMask == limits_.aspectMask);
    assert(full_range.baseArrayLayer == 0);
    assert(full_range.baseMipLevel == 0);
    // TODO: this should be a static assert
    assert(param->AspectCount() <= kMaxSupportedAspect);
    PopulateFunctionPointers();
}

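// Usage sketch (illustrative, not part of the original file; mip_count and layer_count are
// hypothetical values describing the whole image):
//   VkImageSubresourceRange full_range = {VK_IMAGE_ASPECT_COLOR_BIT, 0, mip_count, 0, layer_count};
//   RangeEncoder encoder(full_range, AspectParameters::Get(full_range.aspectMask));
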
SubresourceOffset::SubresourceOffset(const OffsetRangeEncoder& encoder, const VkImageSubresource& subres, const VkOffset3D& offset_)
    : Subresource(encoder, subres), offset({offset_.x, offset_.y}) {
    if (offset_.z > 1) {
        arrayLayer = offset_.z;
    }
}

OffsetRangeEncoder::OffsetRangeEncoder(const VkImageSubresourceRange& full_range, const VkExtent3D& full_range_image_extent,
                                       const AspectParameters* param)
    : RangeEncoder(full_range, param),
      full_range_image_extent_(full_range_image_extent),
      limits_(param->AspectMask(), full_range.levelCount, full_range.layerCount, param->AspectCount(),
              {static_cast<int32_t>(full_range_image_extent_.width), static_cast<int32_t>(full_range_image_extent_.height),
               static_cast<int32_t>(full_range_image_extent_.depth)}),
      offset_size_({static_cast<int32_t>(limits_.aspect_index * AspectSize()),
                    static_cast<int32_t>(limits_.aspect_index * AspectSize() * limits_.offset.x)}),
      encode_offset_function_(nullptr),
      decode_offset_function_(nullptr) {
    if (full_range_image_extent_.depth > 1) {
        limits_.arrayLayer = full_range_image_extent_.depth;
    }
    PopulateFunctionPointers();
}

void OffsetRangeEncoder::PopulateFunctionPointers() {
    // Select the encode/decode specialists
    if (limits_.offset.y == 1) {
        encode_offset_function_ = &OffsetRangeEncoder::Encode1D;
        decode_offset_function_ = &OffsetRangeEncoder::Decode1D;
    } else {
        encode_offset_function_ = &OffsetRangeEncoder::Encode2D;
        decode_offset_function_ = &OffsetRangeEncoder::Decode2D;
    }
}

IndexType OffsetRangeEncoder::Encode1D(const SubresourceOffset& pos) const { return pos.offset.x * OffsetXSize(); }

IndexType OffsetRangeEncoder::Encode2D(const SubresourceOffset& pos) const {
    return (pos.offset.x * OffsetXSize()) + (pos.offset.y * OffsetYSize());
}

IndexType OffsetRangeEncoder::Decode1D(const IndexType& encode, SubresourceOffset& offset_decode) const {
    offset_decode.offset.y = 1;
    offset_decode.offset.x = static_cast<int32_t>(encode / OffsetXSize());
    return (encode % OffsetXSize());
}

IndexType OffsetRangeEncoder::Decode2D(const IndexType& encode, SubresourceOffset& offset_decode) const {
    offset_decode.offset.y = static_cast<int32_t>(encode / OffsetYSize());
    const IndexType new_encode = encode - OffsetYSize() * offset_decode.offset.y;
    offset_decode.offset.x = static_cast<int32_t>(new_encode / OffsetXSize());
    return (new_encode % OffsetXSize());
}

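// Illustrative example (not from the original file): with a single aspect, AspectSize() = 24,
// and an image width of 8, OffsetXSize() = 1 * 24 = 24 and OffsetYSize() = 24 * 8 = 192.
// Encode2D for offset (x = 3, y = 2) yields 3 * 24 + 2 * 192 = 456, and Decode2D(461) recovers
// y = 461 / 192 = 2 and x = (461 - 384) / 24 = 3, returning the residual index 77 % 24 = 5.
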
static bool IsValid(const RangeEncoder& encoder, const VkImageSubresourceRange& bounds) {
    const auto& limits = encoder.Limits();
    return (((bounds.aspectMask & limits.aspectMask) == bounds.aspectMask) &&
            (bounds.baseMipLevel + bounds.levelCount <= limits.mipLevel) &&
            (bounds.baseArrayLayer + bounds.layerCount <= limits.arrayLayer));
}

// Create an iterator-like "generator" that for each increment produces the next index range matching the
// next contiguous (in index space) section of the VkImageSubresourceRange.
// Ranges will always span the layerCount layers, and if the layerCount is the full range of the image (as known by
// the encoder) they will span the levelCount mip levels as well.
RangeGenerator::RangeGenerator(const RangeEncoder& encoder, const VkImageSubresourceRange& subres_range)
    : encoder_(&encoder), isr_pos_(encoder, subres_range), pos_(), aspect_base_() {
    assert((((isr_pos_.Limits()).aspectMask & (encoder.Limits()).aspectMask) == (isr_pos_.Limits()).aspectMask) &&
           ((isr_pos_.Limits()).baseMipLevel + (isr_pos_.Limits()).levelCount <= (encoder.Limits()).mipLevel) &&
           ((isr_pos_.Limits()).baseArrayLayer + (isr_pos_.Limits()).layerCount <= (encoder.Limits()).arrayLayer));

    // To see if we have a full range special case, we need to compare the subres_range against the *encoder's* limits
    const auto& limits = encoder.Limits();
    if (subres_range.baseArrayLayer == 0 && subres_range.layerCount == limits.arrayLayer) {
        if ((subres_range.baseMipLevel == 0) && (subres_range.levelCount == limits.mipLevel)) {
            if (subres_range.aspectMask == limits.aspectMask) {
                // Full range
                pos_.begin = 0;
                pos_.end = encoder.AspectSize() * limits.aspect_index;
                aspect_count_ = 1;  // Flag this to never advance aspects.
            } else {
                // All mips, all layers, but not all aspects
                pos_.begin = encoder.AspectBase(isr_pos_.aspect_index);
                pos_.end = pos_.begin + encoder.AspectSize();
                aspect_count_ = limits.aspect_index;
            }
        } else {
            // All array layers, but not all levels
            pos_.begin = encoder.AspectBase(isr_pos_.aspect_index) + subres_range.baseMipLevel * encoder.MipSize();
            pos_.end = pos_.begin + subres_range.levelCount * encoder.MipSize();
            aspect_count_ = limits.aspect_index;
        }

        // Full set of array layers at a time, thus we can span across all selected mip levels
        mip_count_ = 1;  // we don't ever advance across mips, as we do all of them in one range
    } else {
        // Each range covers all included array layers, for each selected mip level, for each selected aspect,
        // so we'll use the general purpose encode and the smallest range size
        pos_.begin = encoder.Encode(isr_pos_);
        pos_.end = pos_.begin + subres_range.layerCount;

        // we do have to traverse across mips, though (other than the Encode above) we don't have to know which one we are on.
        mip_count_ = subres_range.levelCount;
        aspect_count_ = limits.aspect_index;
    }

    // To get to the next aspect range we offset from the last base
    aspect_base_ = pos_;
    mip_index_ = 0;
    aspect_index_ = isr_pos_.aspect_index;
}

RangeGenerator& RangeGenerator::operator++() {
    mip_index_++;
    // NOTE: If all selected mip levels are done at once, mip_count_ is set to one, not the number of selected mip levels
    if (mip_index_ >= mip_count_) {
        const auto last_aspect_index = aspect_index_;
        // Seek the next valid aspect (if any)
        aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask, aspect_index_ + 1);
        if (aspect_index_ < aspect_count_) {
            // Force isr_pos to the beginning of this found aspect
            isr_pos_.SeekAspect(aspect_index_);
            // The SubresourceGenerator should never be at a tombstone when we aren't
            assert(isr_pos_.aspectMask != 0);

            // Offset by the distance between the last start of aspect and *this* start of aspect
            aspect_base_ += (encoder_->AspectBase(isr_pos_.aspect_index) - encoder_->AspectBase(last_aspect_index));
            pos_ = aspect_base_;
            mip_index_ = 0;
        } else {
            // Tombstone both the index range and the subresource position to the "at end" convention
            pos_ = {0, 0};
            isr_pos_.aspectMask = 0;
        }
    } else {
        // Note: for the layerCount < full_range.layerCount case, because the generated ranges per mip level are
        // discontinuous, we have to emit each mip level's range individually
        pos_ += encoder_->MipSize();
        isr_pos_.SeekMip(isr_pos_.Limits().baseMipLevel + mip_index_);
    }
    return *this;
}

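// Usage sketch (illustrative, not part of the original file; assumes the RangeGenerator
// accessors declared in subresource_adapter.h, e.g. an operator*() exposing the current
// IndexRange, whose non_empty() comes from range_vector.h):
//   RangeEncoder encoder(image_full_range, AspectParameters::Get(image_full_range.aspectMask));
//   for (RangeGenerator gen(encoder, view_range); (*gen).non_empty(); ++gen) {
//       // *gen is the next contiguous [begin, end) index range covered by view_range
//   }
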
static bool IsValid(const OffsetRangeEncoder& encoder, const VkImageSubresourceRange& bounds, const VkOffset2D& offset,
                    const VkExtent2D& extent) {
    const auto& limits = encoder.Limits();
    return (((bounds.aspectMask & limits.aspectMask) == bounds.aspectMask) &&
            (bounds.baseMipLevel + bounds.levelCount <= limits.mipLevel) &&
            (bounds.baseArrayLayer + bounds.layerCount <= limits.arrayLayer) &&
            ((offset.x + static_cast<int32_t>(extent.width)) <= limits.offset.x) &&
            ((offset.y + static_cast<int32_t>(extent.height)) <= limits.offset.y));
}

329
330OffsetRangeGenerator::OffsetRangeGenerator(const OffsetRangeEncoder& encoder, const VkImageSubresourceRange& subres_range,
331 const VkOffset3D& offset, const VkExtent3D& extent)
332 : encoder_(&encoder), isr_pos_(encoder, subres_range, offset, extent), pos_(), aspect_base_() {
333 assert(IsValid(encoder, isr_pos_.Limits(), isr_pos_.Limits_Offset(), isr_pos_.Limits_Extent()));
334
335 // To see if we have a full range special case, need to compare the subres_range against the *encoders* limits
336 const auto& limits = encoder.Limits();
337 if ((subres_range.baseArrayLayer == 0 && subres_range.layerCount == limits.arrayLayer)) {
338 if ((subres_range.baseMipLevel == 0) && (subres_range.levelCount == limits.mipLevel)) {
339 if (subres_range.aspectMask == limits.aspectMask) {
340 if (offset.x == 0 && extent.width == limits.offset.x) {
341 if (offset.y == 0 && extent.height == limits.offset.y) {
342 // Full range
343 pos_.begin = 0;
344 pos_.end = encoder.OffsetYSize() * limits.offset.y;
345 offset_count_ = {1, 1};
346 } else {
347 // Not full Y range
348 pos_.begin = encoder.OffsetYSize() * offset.y + encoder.OffsetYSize() * offset.y;
349 pos_.end = pos_.begin + encoder.OffsetYSize() * extent.height;
350 offset_count_ = {1, 1};
351 }
352 } else {
353 // Not full X Y range
354 pos_.begin = encoder.OffsetYSize() * offset.y + +encoder.OffsetXSize() * offset.x;
355 pos_.end = pos_.begin + encoder.OffsetXSize() * extent.width;
356 offset_count_ = {1, static_cast<int32_t>(extent.height)};
357 }
358 aspect_count_ = 1;
359 } else {
360 // Not full aspect X Y range
361 pos_.begin = encoder.OffsetYSize() * offset.y + +encoder.OffsetXSize() * offset.x +
362 encoder.AspectBase(isr_pos_.aspect_index);
363 pos_.end = pos_.begin + encoder.AspectSize();
364 aspect_count_ = limits.aspect_index;
365 offset_count_ = {static_cast<int32_t>(extent.width), static_cast<int32_t>(extent.height)};
366 }
367 mip_count_ = 1;
368 } else {
369 // Not full mip aspect X Y range
370 pos_.begin = encoder.OffsetYSize() * offset.y + +encoder.OffsetXSize() * offset.x +
371 encoder.AspectBase(isr_pos_.aspect_index) + subres_range.baseMipLevel * encoder.MipSize();
372 pos_.end = pos_.begin + subres_range.levelCount * encoder.MipSize();
373 aspect_count_ = limits.aspect_index;
374 mip_count_ = 1;
375 offset_count_ = {static_cast<int32_t>(extent.width), static_cast<int32_t>(extent.height)};
376 }
377 } else {
378 pos_.begin = encoder.Encode(isr_pos_);
379 pos_.end = pos_.begin + subres_range.layerCount;
380
381 mip_count_ = subres_range.levelCount;
382 aspect_count_ = limits.aspect_index;
383 offset_count_ = {static_cast<int32_t>(extent.width), static_cast<int32_t>(extent.height)};
384 }
385
386 // To get to the next aspect range we offset from the last base
387 aspect_base_ = pos_;
388 offset_x_base_ = pos_;
389 offset_y_base_ = pos_;
390 mip_index_ = 0;
391 aspect_index_ = isr_pos_.aspect_index;
392 offset_index_ = {0, 0};
393}

OffsetRangeGenerator& OffsetRangeGenerator::operator++() {
    mip_index_++;
    // NOTE: If all selected mip levels are done at once, mip_count_ is set to one, not the number of selected mip levels
    if (mip_index_ >= mip_count_) {
        const auto last_aspect_index = aspect_index_;
        // Seek the next valid aspect (if any)
        aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask, aspect_index_ + 1);
        if (aspect_index_ < aspect_count_) {
            // Force isr_pos to the beginning of this found aspect
            isr_pos_.SeekAspect(aspect_index_);
            // The SubresourceGenerator should never be at a tombstone when we aren't
            assert(isr_pos_.aspectMask != 0);

            // Offset by the distance between the last start of aspect and *this* start of aspect
            aspect_base_ += (encoder_->AspectBase(isr_pos_.aspect_index) - encoder_->AspectBase(last_aspect_index));
            pos_ = aspect_base_;
            mip_index_ = 0;
        } else {
            ++offset_index_.x;
            if (offset_index_.x < offset_count_.x) {
                isr_pos_.SeekOffsetX(offset_index_.x);
                offset_x_base_ += encoder_->OffsetXSize();
                pos_ = offset_x_base_;
                aspect_base_ = pos_;
                mip_index_ = 0;
                aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask);
            } else {
                ++offset_index_.y;
                if (offset_index_.y < offset_count_.y) {
                    isr_pos_.SeekOffsetY(offset_index_.y);
                    offset_y_base_ += encoder_->OffsetYSize();
                    pos_ = offset_y_base_;
                    offset_x_base_ = pos_;
                    aspect_base_ = pos_;
                    mip_index_ = 0;
                    aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask);
                    offset_index_.x = 0;
                } else {
                    // Tombstone both the index range and the subresource position to the "at end" convention
                    pos_ = {0, 0};
                    isr_pos_.aspectMask = 0;
                }
            }
        }
    } else {
        // Note: for the layerCount < full_range.layerCount case, because the generated ranges per mip level are
        // discontinuous, we have to emit each mip level's range individually
        pos_ += encoder_->MipSize();
        isr_pos_.SeekMip(isr_pos_.Limits().baseMipLevel + mip_index_);
    }
    return *this;
}

447
John Zulauf11211402019-11-15 14:02:36 -0700448template <typename AspectTraits>
449class AspectParametersImpl : public AspectParameters {
450 public:
451 VkImageAspectFlags AspectMask() const override { return AspectTraits::kAspectMask; }
452 MaskIndexFunc MaskToIndexFunction() const override { return &AspectTraits::MaskIndex; }
453 uint32_t AspectCount() const override { return AspectTraits::kAspectCount; };
454 const VkImageAspectFlagBits* AspectBits() const override { return AspectTraits::AspectBits().data(); }
455};
456
457struct NullAspectTraits {
458 static constexpr uint32_t kAspectCount = 0;
459 static constexpr VkImageAspectFlags kAspectMask = 0;
460 static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
461 static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
462 static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{};
463 return kAspectBits;
464 }
465};
466
467struct ColorAspectTraits {
468 static constexpr uint32_t kAspectCount = 1;
469 static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
470 static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
471 static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
472 static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_COLOR_BIT}};
473 return kAspectBits;
474 }
475};
476
477struct DepthAspectTraits {
478 static constexpr uint32_t kAspectCount = 1;
479 static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
480 static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
481 static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
482 static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
483 return kAspectBits;
484 }
485};
486
487struct StencilAspectTraits {
488 static constexpr uint32_t kAspectCount = 1;
489 static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
490 static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
491 static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
492 static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
493 return kAspectBits;
494 }
495};
496
497struct DepthStencilAspectTraits {
498 // VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, >> 1 -> 1 -1 -> 0
499 // VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, >> 1 -> 2 -1 = 1
500 static constexpr uint32_t kAspectCount = 2;
501 static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
502 static uint32_t MaskIndex(VkImageAspectFlags mask) {
503 uint32_t index = (mask >> 1) - 1;
504 assert((index == 0) || (index == 1));
505 return index;
506 };
507 static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
508 static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
509 {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
510 return kAspectBits;
511 }
512};

struct Multiplane2AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010:  (mask >> 4) - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020:  (mask >> 4) - 1 -> 1
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        assert((index == 0) || (index == 1));
        return index;
    }
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
        return kAspectBits;
    }
};

struct Multiplane3AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010:  (mask >> 4) - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020:  (mask >> 4) - 1 -> 1
    // VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040:  (mask >> 4) - 1 -> 3, clamped to 2 below
    static constexpr uint32_t kAspectCount = 3;
    static constexpr VkImageAspectFlags kAspectMask =
        (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        index = index > 2 ? 2 : index;
        assert((index == 0) || (index == 1) || (index == 2));
        return index;
    }
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
        return kAspectBits;
    }
};

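// Illustrative example (not from the original file):
// Multiplane3AspectTraits::MaskIndex(VK_IMAGE_ASPECT_PLANE_2_BIT) computes
// (0x40 >> 4) - 1 = 3, which the clamp above reduces to the aspect index 2.
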
// Create the encoder parameters suitable for the full-range aspect mask (*must* be canonical)
const AspectParameters* AspectParameters::Get(VkImageAspectFlags aspect_mask) {
    // We need a persistent instance of each specialist, each containing only a vtable
    static const AspectParametersImpl<ColorAspectTraits> kColorParam;
    static const AspectParametersImpl<DepthAspectTraits> kDepthParam;
    static const AspectParametersImpl<StencilAspectTraits> kStencilParam;
    static const AspectParametersImpl<DepthStencilAspectTraits> kDepthStencilParam;
    static const AspectParametersImpl<Multiplane2AspectTraits> kMultiplane2Param;
    static const AspectParametersImpl<Multiplane3AspectTraits> kMultiplane3Param;
    static const AspectParametersImpl<NullAspectTraits> kNullAspect;

    const AspectParameters* param;
    switch (aspect_mask) {
        case ColorAspectTraits::kAspectMask:
            param = &kColorParam;
            break;
        case DepthAspectTraits::kAspectMask:
            param = &kDepthParam;
            break;
        case StencilAspectTraits::kAspectMask:
            param = &kStencilParam;
            break;
        case DepthStencilAspectTraits::kAspectMask:
            param = &kDepthStencilParam;
            break;
        case Multiplane2AspectTraits::kAspectMask:
            param = &kMultiplane2Param;
            break;
        case Multiplane3AspectTraits::kAspectMask:
            param = &kMultiplane3Param;
            break;
        default:
            assert(false);
            param = &kNullAspect;
    }
    return param;
}

}  // namespace subresource_adapter