/* Copyright (c) 2019 The Khronos Group Inc.
 * Copyright (c) 2019 Valve Corporation
 * Copyright (c) 2019 LunarG, Inc.
 * Copyright (C) 2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * John Zulauf <jzulauf@lunarg.com>
 *
 */
#ifdef SPARSE_CONTAINER_UNIT_TEST
#include "image_layout_map.h"
#else
#include "core_validation_types.h"
#include "chassis.h"
#include "descriptor_sets.h"
#endif

namespace image_layout_map {
// Storage for the static state
const ImageSubresourceLayoutMap::ConstIterator ImageSubresourceLayoutMap::end_iterator = ImageSubresourceLayoutMap::ConstIterator();

InitialLayoutState::InitialLayoutState(const CMD_BUFFER_STATE& cb_state_, const IMAGE_VIEW_STATE* view_state_)
    : image_view(VK_NULL_HANDLE), aspect_mask(0), label(cb_state_.debug_label) {
    if (view_state_) {
        image_view = view_state_->image_view;
        aspect_mask = view_state_->create_info.subresourceRange.aspectMask;
    }
}
bool ImageSubresourceLayoutMap::SubresourceLayout::operator==(const ImageSubresourceLayoutMap::SubresourceLayout& rhs) const {
    bool is_equal =
        (current_layout == rhs.current_layout) && (initial_layout == rhs.initial_layout) && (subresource == rhs.subresource);
    return is_equal;
}
ImageSubresourceLayoutMap::ImageSubresourceLayoutMap(const IMAGE_STATE& image_state)
    : encoder_(image_state.full_range),
      image_state_(image_state),
      layouts_(),
      current_layout_view_(layouts_.current, encoder_),
      initial_layout_view_(layouts_.initial, encoder_),
      initial_layout_states_(),
      initial_layout_state_map_() {}

ImageSubresourceLayoutMap::ConstIterator ImageSubresourceLayoutMap::Begin(bool always_get_initial) const {
    return Find(image_state_.full_range, /* skip_invalid */ true, always_get_initial);
}
bool ImageSubresourceLayoutMap::SetSubresourceRangeLayout(const CMD_BUFFER_STATE& cb_state, const VkImageSubresourceRange& range,
                                                          VkImageLayout layout, VkImageLayout expected_layout) {
    bool updated = false;
    if (expected_layout == kInvalidLayout) {
        // Set the initial layout to the set layout, as we had no other layout to reference
        expected_layout = layout;
    }
    if (!InRange(range)) return false;  // Don't even try to track bogus subresources

    InitialLayoutState* initial_state = nullptr;
    RangeGenerator range_gen(encoder_, range);
    // Empty ranges are the range tombstones
    for (; range_gen->non_empty(); ++range_gen) {
        // In order to track whether we've changed anything, we'll do this in a slightly convoluted way...
        // We'll traverse the range looking for values different from ours, then overwrite the range.
        auto lower = layouts_.current.lower_bound(*range_gen);
        bool all_same = false;
        bool contiguous = false;
        if (layouts_.current.is_contiguous(*range_gen, lower)) {
            // The whole range is set to a value, see if assigning to it will change anything...
            all_same = true;
            contiguous = true;
            for (auto pos = lower; (pos != layouts_.current.end()) && pos->first.intersects(*range_gen) && all_same; ++pos) {
                all_same = pos->second == layout;
            }
        }
        if (!all_same) {
            // We only need to try setting anything if we changed any of the layout values above
            layouts_.current.overwrite_range(lower, std::make_pair(*range_gen, layout));
            updated = true;
            // We insert only into gaps (this is a write-once semantic), and only if the current range isn't already
            // contiguous, i.e. still has gaps. If current is contiguous, we know initial is too, but we also have to
            // check for the case where a discontiguous current has contiguous initial information.
            if (!contiguous) {
                auto initial_lower = layouts_.initial.lower_bound(*range_gen);
                bool update_needed = !layouts_.initial.is_contiguous(*range_gen, initial_lower);
                if (update_needed) {
                    layouts_.initial.insert_range(initial_lower, std::make_pair(*range_gen, expected_layout), NoSplit());
                    initial_state = UpdateInitialLayoutState(*range_gen, initial_state, cb_state, nullptr);
                }
            }
        }
    }
    return updated;
}
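// Usage sketch (illustrative only, not part of this file's API surface): a caller recording an image barrier
// could update the map roughly like this, assuming it has already looked up the per-command-buffer
// ImageSubresourceLayoutMap for the image (the accessor name below is hypothetical):
//
//     ImageSubresourceLayoutMap* layout_map = GetOrCreateLayoutMap(cb_state, image_state);  // hypothetical helper
//     layout_map->SetSubresourceRangeLayout(cb_state, barrier.subresourceRange, barrier.newLayout, barrier.oldLayout);
//
// Passing kInvalidLayout as expected_layout makes the new layout double as the recorded initial layout for any
// subresources this command buffer has not touched before.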
bool ImageSubresourceLayoutMap::SetSubresourceRangeInitialLayout(const CMD_BUFFER_STATE& cb_state,
                                                                 const VkImageSubresourceRange& range, VkImageLayout layout,
                                                                 const IMAGE_VIEW_STATE* view_state) {
    bool updated = false;
    if (!InRange(range)) return false;  // Don't even try to track bogus subresources

    InitialLayoutState* initial_state = nullptr;
    RangeGenerator range_gen(encoder_, range);

    for (; range_gen->non_empty(); ++range_gen) {
        auto lower = layouts_.initial.lower_bound(*range_gen);
        bool update_needed = !layouts_.initial.is_contiguous(*range_gen, lower);
        if (update_needed) {
            layouts_.initial.insert_range(lower, std::make_pair(*range_gen, layout), NoSplit());
            initial_state = UpdateInitialLayoutState(*range_gen, initial_state, cb_state, view_state);
            updated = true;
        }
    }
    return updated;
}

static VkImageLayout FindInMap(IndexType index, const ImageSubresourceLayoutMap::RangeMap& map) {
    auto found = map.find(index);
    VkImageLayout value = kInvalidLayout;
    if (found != map.end()) {
        value = found->second;
    }
    return value;
}
VkImageLayout ImageSubresourceLayoutMap::GetSubresourceLayout(const VkImageSubresource& subresource) const {
    IndexType index = encoder_.Encode(subresource);
    return FindInMap(index, layouts_.current);
}

VkImageLayout ImageSubresourceLayoutMap::GetSubresourceInitialLayout(const VkImageSubresource& subresource) const {
    IndexType index = encoder_.Encode(subresource);
    return FindInMap(index, layouts_.initial);
}

// Saves an encode to fetch both in the same call
ImageSubresourceLayoutMap::Layouts ImageSubresourceLayoutMap::GetSubresourceLayouts(const VkImageSubresource& subresource,
                                                                                    bool always_get_initial) const {
    IndexType index = encoder_.Encode(subresource);
    Layouts layouts{FindInMap(index, layouts_.current), kInvalidLayout};
    if (always_get_initial || (layouts.current_layout != kInvalidLayout)) {
        layouts.initial_layout = FindInMap(index, layouts_.initial);
    }
    return layouts;
}
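// Usage sketch (hypothetical caller code): fetching both layouts with a single Encode() rather than calling
// GetSubresourceLayout() and GetSubresourceInitialLayout() back to back. kInvalidLayout is assumed to be visible
// to the caller just as it is in this translation unit:
//
//     VkImageSubresource subres{VK_IMAGE_ASPECT_COLOR_BIT, /* mipLevel */ 0, /* arrayLayer */ 0};
//     const auto layouts = layout_map.GetSubresourceLayouts(subres, /* always_get_initial */ true);
//     if (layouts.current_layout == kInvalidLayout) {
//         // No layout set in this command buffer; layouts.initial_layout (if valid) records the first-use expectation.
//     }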

const InitialLayoutState* ImageSubresourceLayoutMap::GetSubresourceInitialLayoutState(const VkImageSubresource subresource) const {
    if (!InRange(subresource)) return nullptr;
    const auto index = encoder_.Encode(subresource);
    const auto found = initial_layout_state_map_.find(index);
    if (found != initial_layout_state_map_.end()) {
        return found->second;
    }
    return nullptr;
}

// TODO: make sure this paranoia check is sufficient and not too much.
uintptr_t ImageSubresourceLayoutMap::CompatibilityKey() const {
    return (reinterpret_cast<const uintptr_t>(&image_state_) ^ encoder_.AspectMask());
}

bool ImageSubresourceLayoutMap::UpdateFrom(const ImageSubresourceLayoutMap& other) {
    using Arbiter = sparse_container::splice_precedence;

    using sparse_container::range;
    // Must be from matching images for the reinterpret cast to be valid
    assert(CompatibilityKey() == other.CompatibilityKey());
    if (CompatibilityKey() != other.CompatibilityKey()) return false;

    bool updated = false;
    updated |= sparse_container::splice(&layouts_.initial, other.layouts_.initial, Arbiter::prefer_dest);
    updated |= sparse_container::splice(&layouts_.current, other.layouts_.current, Arbiter::prefer_source);
    // NOTE -- we are copying plain pointers from 'other', which owns them as unique_ptr. This works because
    // currently this function is only used to import from secondary command buffers, destruction of which
    // invalidates the referencing primary command buffer, meaning that the dangling pointer will either be
    // cleaned up in invalidation, or not referenced by validation code.
    sparse_container::splice(&initial_layout_state_map_, other.initial_layout_state_map_, Arbiter::prefer_dest);

    return updated;
}
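// Worked example of the precedence choices above (a sketch, not normative): suppose the primary command buffer
// recorded mip 0 as initial=UNDEFINED, current=GENERAL, and the secondary being imported recorded mip 0 as
// initial=GENERAL, current=TRANSFER_DST_OPTIMAL. After UpdateFrom():
//   * layouts_.initial keeps UNDEFINED (prefer_dest) -- the primary's first-use expectation still governs, and
//   * layouts_.current becomes TRANSFER_DST_OPTIMAL (prefer_source) -- the most recent transition wins.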
InitialLayoutState* ImageSubresourceLayoutMap::UpdateInitialLayoutState(const IndexRange& range, InitialLayoutState* initial_state,
                                                                        const CMD_BUFFER_STATE& cb_state,
                                                                        const IMAGE_VIEW_STATE* view_state) {
    if (!initial_state) {
        // Allocate on demand...  initial_layout_states_ holds ownership as a unique_ptr, while
        // each subresource has a non-owning copy of the plain pointer.
        initial_state = new InitialLayoutState(cb_state, view_state);
        initial_layout_states_.emplace_back(initial_state);
    }
    assert(initial_state);
    initial_layout_state_map_.insert_range(std::make_pair(range, initial_state), NoSplit());
    return initial_state;
}

// Loop over the given range calling the callback, primarily for
// validation checks.  By default the initial layout is only looked
// up if the set (current) layout isn't found.
bool ImageSubresourceLayoutMap::ForRange(const VkImageSubresourceRange& range, const Callback& callback, bool skip_invalid,
                                         bool always_get_initial) const {
    if (!InRange(range)) return false;  // Don't even try to process bogus subresources

    RangeGenerator range_gen(encoder_, range);
    SubresourceGenerator& subres_gen = range_gen.GetSubresourceGenerator();
    ParallelIterator<const RangeMap, const RangeMap> parallel_it(layouts_.current, layouts_.initial, range_gen->begin);

    bool keep_on = true;
    IndexType current;
    for (; range_gen->non_empty(); ++range_gen) {
        current = range_gen->begin;
        if (!parallel_it->range.includes(current)) {  // NOTE: empty ranges can't include anything
            parallel_it.seek(current);
        }
        if (parallel_it->range.empty() && skip_invalid) {
            // We're past the end of mapped data, and we aren't interested, so we're done
            break;
        }
        while (range_gen->includes(current)) {
            VkImageLayout layout = kInvalidLayout;
            VkImageLayout initial_layout = kInvalidLayout;
            IndexType constant_value_bound = range_gen->end;
            // The generated range can validly traverse past the end of stored data
            if (!parallel_it->range.empty()) {
                layout = sparse_container::evaluate(parallel_it->pos_A, kInvalidLayout);
                if (layout == kInvalidLayout || always_get_initial) {
                    initial_layout = sparse_container::evaluate(parallel_it->pos_B, kInvalidLayout);
                }
                constant_value_bound = std::min(parallel_it->range.end, constant_value_bound);
            }

            if (!skip_invalid || (layout != kInvalidLayout) || (initial_layout != kInvalidLayout)) {
                for (; current < constant_value_bound; current++, ++subres_gen) {
                    keep_on = callback(*subres_gen, layout, initial_layout);
                    if (!keep_on) return keep_on;  // False value from the callback aborts the range traversal
                }
            } else {
                subres_gen.Seek(constant_value_bound);  // Move the subresource to the end of the skipped range
                current = constant_value_bound;
            }
            // Advance the parallel it if needed and possible
            if (!parallel_it->range.empty() && !parallel_it->range.includes(current)) {
                ++parallel_it;
            }
        }
        // ++range_gen will update subres_gen.
    }
    return keep_on;
}
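// Usage sketch (hypothetical validation check), assuming the Callback signature matches the way it is invoked
// above -- (const VkImageSubresource&, VkImageLayout, VkImageLayout) returning bool, where false aborts the walk.
// required_layout is a placeholder for whatever layout the check expects:
//
//     const bool completed = layout_map.ForRange(
//         range,
//         [&](const VkImageSubresource& subres, VkImageLayout layout, VkImageLayout initial_layout) {
//             if ((layout != kInvalidLayout) && (layout != required_layout)) {
//                 // log a layout mismatch for subres here...
//                 return false;  // stop traversing
//             }
//             return true;  // keep going
//         },
//         /* skip_invalid */ true, /* always_get_initial */ false);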

// This is the same constant value range, subresource position advance logic as ForRange above, but suitable for use with
// an Increment operator.
void ImageSubresourceLayoutMap::ConstIterator::UpdateRangeAndValue() {
    bool not_found = true;
    while (range_gen_->non_empty() && not_found) {
        if (!parallel_it_->range.includes(current_index_)) {  // NOTE: empty ranges can't include anything
            parallel_it_.seek(current_index_);
        }
        if (parallel_it_->range.empty() && skip_invalid_) {
            // We're past the end of mapped data, and we aren't interested, so we're done
            // Set the end condition...
            ForceEndCondition();
        }
        // Search within the current range for a valid constant value interval
        // The while condition allows the parallel iterator to advance constant value ranges as needed.
        while (range_gen_->includes(current_index_) && not_found) {
            pos_.current_layout = kInvalidLayout;
            pos_.initial_layout = kInvalidLayout;
            constant_value_bound_ = range_gen_->end;
            // The generated range can validly traverse past the end of stored data
            if (!parallel_it_->range.empty()) {
                pos_.current_layout = sparse_container::evaluate(parallel_it_->pos_A, kInvalidLayout);
                if (pos_.current_layout == kInvalidLayout || always_get_initial_) {
                    pos_.initial_layout = sparse_container::evaluate(parallel_it_->pos_B, kInvalidLayout);
                }
                // The constant value bound marks the end of contiguous (w.r.t. range_gen_) indices with the same value, allowing
                // Increment (for example) to forgo this logic until finding a new range is needed.
                constant_value_bound_ = std::min(parallel_it_->range.end, constant_value_bound_);
            }
            if (!skip_invalid_ || (pos_.current_layout != kInvalidLayout) || (pos_.initial_layout != kInvalidLayout)) {
                // we found it ... set the position and exit condition.
                pos_.subresource = range_gen_.GetSubresource();
                not_found = false;
            } else {
                // We're skipping this constant value range, set the index to the exclusive end and look again
                // Note that we ONLY need to Seek the Subresource generator on a skip condition.
                range_gen_.GetSubresourceGenerator().Seek(
                    constant_value_bound_);  // Move the subresource to the end of the skipped range
                current_index_ = constant_value_bound_;

                // Advance the parallel it if needed and possible
                // NOTE: We don't need to seek, as current_index_ can only be in the current or next constant value range
                if (!parallel_it_->range.empty() && !parallel_it_->range.includes(current_index_)) {
                    ++parallel_it_;
                }
            }
        }

        if (not_found) {
            // ++range_gen will update subres_gen.
            ++range_gen_;
            current_index_ = range_gen_->begin;
        }
    }

    if (range_gen_->empty()) {
        ForceEndCondition();
    }
}

void ImageSubresourceLayoutMap::ConstIterator::Increment() {
    ++current_index_;
    ++(range_gen_.GetSubresourceGenerator());
    if (constant_value_bound_ <= current_index_) {
        UpdateRangeAndValue();
    } else {
        pos_.subresource = range_gen_.GetSubresource();
    }
}
ImageSubresourceLayoutMap::ConstIterator::ConstIterator(const RangeMap& current, const RangeMap& initial, const Encoder& encoder,
                                                        const VkImageSubresourceRange& subres, bool skip_invalid,
                                                        bool always_get_initial)
    : range_gen_(encoder, subres),
      parallel_it_(current, initial, range_gen_->begin),
      skip_invalid_(skip_invalid),
      always_get_initial_(always_get_initial),
      pos_(),
      current_index_(range_gen_->begin),
      constant_value_bound_() {
    UpdateRangeAndValue();
}
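// Iteration sketch (hypothetical caller): walking every subresource the map knows about. Begin() is defined above;
// End() and the comparison/dereference operators are assumed to follow the usual iterator conventions declared in
// image_layout_map.h:
//
//     for (auto it = layout_map.Begin(/* always_get_initial */ false); it != layout_map.End(); ++it) {
//         const auto& entry = *it;  // SubresourceLayout: subresource, current_layout, initial_layout
//         // inspect entry.current_layout / entry.initial_layout here...
//     }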

}  // namespace image_layout_map