blob: 828ba977b5663a63620cab10903c530bc596d3ac [file] [log] [blame]
John Zulauf11211402019-11-15 14:02:36 -07001/* Copyright (c) 2019 The Khronos Group Inc.
2 * Copyright (c) 2019 Valve Corporation
3 * Copyright (c) 2019 LunarG, Inc.
4 * Copyright (C) 2019 Google Inc.
5 *
6 * Licensed under the Apache License, Version 2.0 (the "License");
7 * you may not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * http://www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an "AS IS" BASIS,
14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 *
18 * John Zulauf <jzulauf@lunarg.com>
19 *
20 */
21#include <cassert>
22#include "subresource_adapter.h"
23
24namespace subresource_adapter {
John Zulauf2ea823e2019-11-19 08:54:59 -070025Subresource::Subresource(const RangeEncoder& encoder, const VkImageSubresource& subres)
26 : VkImageSubresource({0, subres.mipLevel, subres.arrayLayer}), aspect_index() {
27 aspect_index = encoder.LowerBoundFromMask(subres.aspectMask);
28 aspectMask = encoder.AspectBit(aspect_index);
29}
30
31IndexType RangeEncoder::Encode1AspectArrayOnly(const Subresource& pos) const { return pos.arrayLayer; }
32IndexType RangeEncoder::Encode1AspectMipArray(const Subresource& pos) const { return pos.arrayLayer + pos.mipLevel * mip_size_; }
33IndexType RangeEncoder::Encode1AspectMipOnly(const Subresource& pos) const { return pos.mipLevel; }
John Zulauf11211402019-11-15 14:02:36 -070034
35IndexType RangeEncoder::EncodeAspectArrayOnly(const Subresource& pos) const {
John Zulauf2ea823e2019-11-19 08:54:59 -070036 return pos.arrayLayer + aspect_base_[pos.aspect_index];
John Zulauf11211402019-11-15 14:02:36 -070037}
38IndexType RangeEncoder::EncodeAspectMipArray(const Subresource& pos) const {
John Zulauf2ea823e2019-11-19 08:54:59 -070039 return pos.arrayLayer + pos.mipLevel * mip_size_ + aspect_base_[pos.aspect_index];
John Zulauf11211402019-11-15 14:02:36 -070040}
John Zulauf2ea823e2019-11-19 08:54:59 -070041IndexType RangeEncoder::EncodeAspectMipOnly(const Subresource& pos) const { return pos.mipLevel + aspect_base_[pos.aspect_index]; }
John Zulauf11211402019-11-15 14:02:36 -070042
John Zulauf2ea823e2019-11-19 08:54:59 -070043uint32_t RangeEncoder::LowerBoundImpl1(VkImageAspectFlags aspect_mask) const {
44 assert(aspect_mask & aspect_bits_[0]);
45 return 0;
46}
47uint32_t RangeEncoder::LowerBoundWithStartImpl1(VkImageAspectFlags aspect_mask, uint32_t start) const {
48 assert(start == 0);
49 if (aspect_mask & aspect_bits_[0]) {
50 return 0;
51 }
52 return limits_.aspect_index;
John Zulauf11211402019-11-15 14:02:36 -070053}
54
John Zulauf2ea823e2019-11-19 08:54:59 -070055uint32_t RangeEncoder::LowerBoundImpl2(VkImageAspectFlags aspect_mask) const {
56 if (aspect_mask & aspect_bits_[0]) {
57 return 0;
58 }
59 assert(aspect_mask & aspect_bits_[1]);
60 return 1;
John Zulauf11211402019-11-15 14:02:36 -070061}
John Zulauf2ea823e2019-11-19 08:54:59 -070062uint32_t RangeEncoder::LowerBoundWithStartImpl2(VkImageAspectFlags aspect_mask, uint32_t start) const {
63 if (start == 0) {
64 if (aspect_mask & aspect_bits_[0]) {
65 return 0;
66 }
67 } else {
68 assert(start == 1);
69 if (aspect_mask & aspect_bits_[1]) {
70 return 1;
71 }
72 }
73 return limits_.aspect_index;
74}
75
76uint32_t RangeEncoder::LowerBoundImpl3(VkImageAspectFlags aspect_mask) const {
77 if (aspect_mask & aspect_bits_[0]) {
78 return 0;
79 } else if (aspect_mask & aspect_bits_[1]) {
80 return 1;
81 } else {
82 assert(aspect_mask & aspect_bits_[2]);
83 return 2;
84 }
85}
86
87uint32_t RangeEncoder::LowerBoundWithStartImpl3(VkImageAspectFlags aspect_mask, uint32_t start) const {
88 if (start == 0) {
89 if (aspect_mask & aspect_bits_[0]) {
90 return 0;
91 }
92 } else if (start == 1) {
93 if ((aspect_mask & aspect_bits_[1])) {
94 return 1;
95 }
96 } else {
97 assert(start == 2);
98 if ((aspect_mask & aspect_bits_[2])) {
99 return 2;
100 }
101 }
102 return limits_.aspect_index;
103}
104
// Bind the encode/decode and aspect lower-bound member-function pointers to
// the specialization matching this encoder's aspect count and any degenerate
// (single-mip / single-layer) dimension, then build the per-aspect linear
// index offset table aspect_base_. Must run after limits_/mip_size_/
// aspect_size_ are initialized (called from the constructor).
void RangeEncoder::PopulateFunctionPointers() {
    // Select the encode/decode specialists
    if (limits_.aspect_index == 1) {
        // One aspect use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::Encode1AspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<1>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::Encode1AspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<1>;
        } else {
            encode_function_ = &RangeEncoder::Encode1AspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<1>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl1;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl1;
    } else if (limits_.aspect_index == 2) {
        // Two aspect use simplified encode/decode math
        if (limits_.arrayLayer == 1) {  // Same as mip_size_ == 1
            encode_function_ = &RangeEncoder::EncodeAspectMipOnly;
            decode_function_ = &RangeEncoder::DecodeAspectMipOnly<2>;
        } else if (limits_.mipLevel == 1) {
            encode_function_ = &RangeEncoder::EncodeAspectArrayOnly;
            decode_function_ = &RangeEncoder::DecodeAspectArrayOnly<2>;
        } else {
            encode_function_ = &RangeEncoder::EncodeAspectMipArray;
            decode_function_ = &RangeEncoder::DecodeAspectMipArray<2>;
        }
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl2;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl2;
    } else {
        // Three aspects: always use the fully general encode/decode
        encode_function_ = &RangeEncoder::EncodeAspectMipArray;
        decode_function_ = &RangeEncoder::DecodeAspectMipArray<3>;
        lower_bound_function_ = &RangeEncoder::LowerBoundImpl3;
        lower_bound_with_start_function_ = &RangeEncoder::LowerBoundWithStartImpl3;
    }

    // Initialize the offset array: aspect i begins aspect_size_ past aspect i-1
    aspect_base_[0] = 0;
    for (uint32_t i = 1; i < limits_.aspect_index; ++i) {
        aspect_base_[i] = aspect_base_[i - 1] + aspect_size_;
    }
}
John Zulauf11211402019-11-15 14:02:36 -0700148
// Construct an encoder spanning the *entire* image described by full_range.
// limits_ records the exclusive bounds (aspect mask, level/layer counts,
// aspect count); mip_size_ and aspect_size_ are the index-space strides of
// one mip level and one aspect (layers are the innermost dimension).
// NOTE: member-init order matters — aspect_size_ is computed from mip_size_.
RangeEncoder::RangeEncoder(const VkImageSubresourceRange& full_range, const AspectParameters* param)
    : limits_(param->AspectMask(), full_range.levelCount, full_range.layerCount, param->AspectCount()),
      mip_size_(full_range.layerCount),
      aspect_size_(mip_size_ * full_range.levelCount),
      aspect_bits_(param->AspectBits()),
      mask_index_function_(param->MaskToIndexFunction()),
      encode_function_(nullptr),
      decode_function_(nullptr) {
    // Only valid to create an encoder for a *whole* image (i.e. base must be zero, and the specified limits_.selected_aspects
    // *must* be equal to the traits aspect mask. (Encoder range assumes zero bases)
    assert(full_range.aspectMask == limits_.aspectMask);
    assert(full_range.baseArrayLayer == 0);
    assert(full_range.baseMipLevel == 0);
    // TODO: should be some static assert
    assert(param->AspectCount() <= kMaxSupportedAspect);
    PopulateFunctionPointers();
}
166
John Zulauf2ea823e2019-11-19 08:54:59 -0700167static bool IsValid(const RangeEncoder& encoder, const VkImageSubresourceRange& bounds) {
168 const auto& limits = encoder.Limits();
169 return (((bounds.aspectMask & limits.aspectMask) == bounds.aspectMask) &&
170 (bounds.baseMipLevel + bounds.levelCount <= limits.mipLevel) &&
171 (bounds.baseArrayLayer + bounds.layerCount <= limits.arrayLayer));
John Zulauf11211402019-11-15 14:02:36 -0700172}
173
// Create an iterator like "generator" that for each increment produces the next index range matching the
// next contiguous (in index space) section of the VkImageSubresourceRange
// Ranges will always span the layerCount layers, and if the layerCount is the full range of the image (as known by
// the encoder) will span the levelCount mip levels as well.
RangeGenerator::RangeGenerator(const RangeEncoder& encoder, const VkImageSubresourceRange& subres_range)
    : encoder_(&encoder), isr_pos_(encoder, subres_range), pos_(), aspect_base_() {
    assert(IsValid(encoder, isr_pos_.Limits()));

    // To see if we have a full range special case, need to compare the subres_range against the *encoder's* limits
    const auto& limits = encoder.Limits();
    if ((subres_range.baseArrayLayer == 0 && subres_range.layerCount == limits.arrayLayer)) {
        if ((subres_range.baseMipLevel == 0) && (subres_range.levelCount == limits.mipLevel)) {
            if (subres_range.aspectMask == limits.aspectMask) {
                // Full range: a single contiguous span covering the whole index space
                pos_.begin = 0;
                pos_.end = encoder.AspectSize() * limits.aspect_index;
                aspect_count_ = 1;  // Flag this to never advance aspects.
            } else {
                // All mips all layers but not all aspects: one span per selected aspect
                pos_.begin = encoder.AspectBase(isr_pos_.aspect_index);
                pos_.end = pos_.begin + encoder.AspectSize();
                aspect_count_ = limits.aspect_index;
            }
        } else {
            // All array layers, but not all levels: one span of whole mips per aspect
            pos_.begin = encoder.AspectBase(isr_pos_.aspect_index) + subres_range.baseMipLevel * encoder.MipSize();
            pos_.end = pos_.begin + subres_range.levelCount * encoder.MipSize();
            aspect_count_ = limits.aspect_index;
        }

        // Full set of array layers at a time, thus we can span across all selected mip levels
        mip_count_ = 1;  // we don't ever advance across mips, as we do all of them in one range
    } else {
        // Each range covers all included array_layers for each selected mip_level for each given selected aspect
        // so we'll use the general purpose encode and smallest range size
        pos_.begin = encoder.Encode(isr_pos_);
        pos_.end = pos_.begin + subres_range.layerCount;

        // we do have to traverse across mips, though (other than Encode above), we don't have to know which one we are on.
        mip_count_ = subres_range.levelCount;
        aspect_count_ = limits.aspect_index;
    }

    // To get to the next aspect range we offset from the last base
    aspect_base_ = pos_;
    mip_index_ = 0;
    aspect_index_ = isr_pos_.aspect_index;
}
222
// Advance to the next contiguous index range: first across the selected mip
// levels (when ranges are per-mip), then to the next selected aspect.
// When exhausted, both the index range (pos_ == {0, 0}) and the subresource
// position (aspectMask == 0) are tombstoned to the "at end" convention.
RangeGenerator& RangeGenerator::operator++() {
    mip_index_++;
    // NOTE: If all selected mip levels are done at once, mip_count_ is set to one, not the number of selected mip_levels
    if (mip_index_ >= mip_count_) {
        const auto last_aspect_index = aspect_index_;
        // Seek the next valid aspect (if any)
        aspect_index_ = encoder_->LowerBoundFromMask(isr_pos_.Limits().aspectMask, aspect_index_ + 1);
        if (aspect_index_ < aspect_count_) {
            // Force isr_pos to the beginning of this found aspect
            isr_pos_.SeekAspect(aspect_index_);
            // SubresourceGenerator should never be at a tombstone when we aren't
            assert(isr_pos_.aspectMask != 0);

            // Offset by the distance between the last start of aspect and *this* start of aspect
            aspect_base_ += (encoder_->AspectBase(isr_pos_.aspect_index) - encoder_->AspectBase(last_aspect_index));
            pos_ = aspect_base_;
            mip_index_ = 0;
        } else {
            // Tombstone both index range and subresource positions to "At end" convention
            pos_ = {0, 0};
            isr_pos_.aspectMask = 0;
        }
    } else {
        // Note: for the layerCount < full_range.layerCount case, because the generated ranges per mip_level are discontinuous
        // we have to do each individual array of ranges
        pos_ += encoder_->MipSize();
        isr_pos_.SeekMip(isr_pos_.Limits().baseMipLevel + mip_index_);
    }
    return *this;
}
253
// Adapter exposing a compile-time AspectTraits table through the virtual
// AspectParameters interface; each specialization carries only a vtable.
template <typename AspectTraits>
class AspectParametersImpl : public AspectParameters {
  public:
    VkImageAspectFlags AspectMask() const override { return AspectTraits::kAspectMask; }
    MaskIndexFunc MaskToIndexFunction() const override { return &AspectTraits::MaskIndex; }
    uint32_t AspectCount() const override { return AspectTraits::kAspectCount; };
    const VkImageAspectFlagBits* AspectBits() const override { return AspectTraits::AspectBits().data(); }
};
262
// Fallback traits for an unrecognized aspect mask: zero aspects, empty table.
struct NullAspectTraits {
    static constexpr uint32_t kAspectCount = 0;
    static constexpr VkImageAspectFlags kAspectMask = 0;
    // mask is ignored — there are no aspects to index.
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{};
        return kAspectBits;
    }
};
272
// Traits for color images: a single COLOR aspect at index 0.
struct ColorAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    // Only one aspect, so every mask maps to index 0.
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_COLOR_BIT}};
        return kAspectBits;
    }
};
282
// Traits for depth-only images: a single DEPTH aspect at index 0.
struct DepthAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
    // Only one aspect, so every mask maps to index 0.
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_DEPTH_BIT}};
        return kAspectBits;
    }
};
292
// Traits for stencil-only images: a single STENCIL aspect at index 0.
struct StencilAspectTraits {
    static constexpr uint32_t kAspectCount = 1;
    static constexpr VkImageAspectFlags kAspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
    // Only one aspect, so every mask maps to index 0.
    static uint32_t MaskIndex(VkImageAspectFlags mask) { return 0; };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{{VK_IMAGE_ASPECT_STENCIL_BIT}};
        return kAspectBits;
    }
};
302
// Traits for combined depth/stencil images: two aspects, indexed by shifting
// the single-bit mask (expects exactly one of the two bits to be set).
struct DepthStencilAspectTraits {
    // VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,  >> 1 -> 1 -1 -> 0
    // VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, >> 1 -> 2 -1 = 1
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 1) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_DEPTH_BIT, VK_IMAGE_ASPECT_STENCIL_BIT}};
        return kAspectBits;
    }
};
319
// Traits for two-plane multi-planar formats: PLANE_0 and PLANE_1, indexed by
// shifting the single-bit mask (expects exactly one of the two bits set).
struct Multiplane2AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
    static constexpr uint32_t kAspectCount = 2;
    static constexpr VkImageAspectFlags kAspectMask = (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        assert((index == 0) || (index == 1));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT}};
        return kAspectBits;
    }
};
336
// Traits for three-plane multi-planar formats: PLANE_0/1/2, indexed by
// shifting the single-bit mask (expects exactly one of the three bits set).
struct Multiplane3AspectTraits {
    // VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, >> 4 - 1 -> 0
    // VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, >> 4 - 1 -> 1
    // VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, >> 4 - 1 -> 3 (clamped to 2 below)
    static constexpr uint32_t kAspectCount = 3;
    static constexpr VkImageAspectFlags kAspectMask =
        (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT);
    static uint32_t MaskIndex(VkImageAspectFlags mask) {
        uint32_t index = (mask >> 4) - 1;
        // PLANE_2 shifts to 3, not 2 (the bit values aren't contiguous after
        // shifting), so clamp it to the last valid index.
        index = index > 2 ? 2 : index;
        assert((index == 0) || (index == 1) || (index == 2));
        return index;
    };
    static const std::array<VkImageAspectFlagBits, kAspectCount>& AspectBits() {
        static std::array<VkImageAspectFlagBits, kAspectCount> kAspectBits{
            {VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, VK_IMAGE_ASPECT_PLANE_2_BIT}};
        return kAspectBits;
    }
};
356
357// Create the encoder parameter suitable to the full range aspect mask (*must* be canonical)
358const AspectParameters* AspectParameters::Get(VkImageAspectFlags aspect_mask) {
359 // We need a persitent instance of each specialist containing only a VTABLE each
360 static const AspectParametersImpl<ColorAspectTraits> kColorParam;
361 static const AspectParametersImpl<DepthAspectTraits> kDepthParam;
362 static const AspectParametersImpl<StencilAspectTraits> kStencilParam;
363 static const AspectParametersImpl<DepthStencilAspectTraits> kDepthStencilParam;
364 static const AspectParametersImpl<Multiplane2AspectTraits> kMutliplane2Param;
365 static const AspectParametersImpl<Multiplane3AspectTraits> kMutliplane3Param;
366 static const AspectParametersImpl<NullAspectTraits> kNullAspect;
367
368 const AspectParameters* param;
369 switch (aspect_mask) {
370 case ColorAspectTraits::kAspectMask:
371 param = &kColorParam;
372 break;
373 case DepthAspectTraits::kAspectMask:
374 param = &kDepthParam;
375 break;
376 case StencilAspectTraits::kAspectMask:
377 param = &kStencilParam;
378 break;
379 case DepthStencilAspectTraits::kAspectMask:
380 param = &kDepthStencilParam;
381 break;
382 case Multiplane2AspectTraits::kAspectMask:
383 param = &kMutliplane2Param;
384 break;
385 case Multiplane3AspectTraits::kAspectMask:
386 param = &kMutliplane3Param;
387 break;
388 default:
389 assert(false);
390 param = &kNullAspect;
391 }
392 return param;
393}
394
395}; // namespace subresource_adapter