/*
 * Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (c) 2015-2022 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Author: Chia-I Wu <olvaffe@gmail.com>
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Mike Stroyan <mike@LunarG.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 * Author: Cody Northrop <cnorthrop@google.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Jeremy Kniager <jeremyk@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: John Zulauf <jzulauf@lunarg.com>
 */
#include <type_traits>

#include "cast_utils.h"
#include "layer_validation_tests.h"

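// Buffer copy hazard detection: each vk::CmdCopyBuffer / vk::CmdFillBuffer / vk::CmdUpdateBuffer below either
// establishes a prior access or deliberately collides with one, and the error monitor checks that sync validation
// reports the expected SYNC-HAZARD-* message (or stays silent when a pipeline barrier resolves the dependency).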
TEST_F(VkSyncValTest, SyncBufferCopyHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_BUFFER_MARKER_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    bool has_amd_buffer_marker = DeviceExtensionEnabled(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);

    VkBufferObj buffer_a;
    VkBufferObj buffer_b;
    VkBufferObj buffer_c;
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);

    VkBufferCopy region = {0, 0, 256};
    VkBufferCopy front2front = {0, 0, 128};
    VkBufferCopy front2back = {0, 128, 128};
    VkBufferCopy back2back = {128, 128, 128};

    auto cb = m_commandBuffer->handle();
    m_commandBuffer->begin();

    vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
    buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    buffer_barrier.buffer = buffer_a.handle();
    buffer_barrier.offset = 0;
    buffer_barrier.size = 256;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
                           nullptr);

    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);

    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protect C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    m_commandBuffer->end();

    // CmdFillBuffer
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
    m_commandBuffer->end();

    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // CmdUpdateBuffer
    int i = 10;
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
    m_commandBuffer->end();

    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

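    // The same hazards must also be detected when the conflicting accesses come from secondary command buffers, both
    // against work already recorded in the primary and between secondaries executed by a single vkCmdExecuteCommands.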
    // Create secondary buffers to use
    VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb1 = secondary_cb1.handle();
    secondary_cb1.begin();
    vk::CmdCopyBuffer(scb1, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    secondary_cb1.end();

    VkCommandBufferObj secondary_cb2(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb2 = secondary_cb2.handle();
    secondary_cb2.begin();
    vk::CmdCopyBuffer(scb2, buffer_a.handle(), buffer_c.handle(), 1, &front2front);
    secondary_cb2.end();

    VkCommandBufferObj secondary_cb3(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb3 = secondary_cb3.handle();
    secondary_cb3.begin();
    secondary_cb3.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 0,
                                  nullptr);
    secondary_cb3.end();

    VkCommandBufferObj secondary_cb4(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb4 = secondary_cb4.handle();
    secondary_cb4.begin();
    vk::CmdCopyBuffer(scb4, buffer_b.handle(), buffer_c.handle(), 1, &front2front);
    secondary_cb4.end();

    // One secondary CB hazard with active command buffer
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdExecuteCommands(cb, 1, &scb1);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // Two secondary CBs that hazard with each other
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    // There is also a "SYNC-HAZARD-WRITE_AFTER_WRITE" present, but only the first hazard is reported.
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    {
        VkCommandBuffer two_cbs[2] = {scb1, scb2};
        vk::CmdExecuteCommands(cb, 2, two_cbs);
    }
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // Two secondary CBs that hazard with each other
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    {
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
        VkCommandBuffer two_cbs[2] = {scb1, scb4};
        vk::CmdExecuteCommands(cb, 2, two_cbs);
        m_errorMonitor->VerifyFound();
    }
    m_commandBuffer->end();

    // Add a secondary CB with a barrier
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    {
        VkCommandBuffer three_cbs[3] = {scb1, scb3, scb4};
        vk::CmdExecuteCommands(cb, 3, three_cbs);
    }
    m_commandBuffer->end();

    m_commandBuffer->reset();
    // CmdWriteBufferMarkerAMD
    if (has_amd_buffer_marker) {
        auto fpCmdWriteBufferMarkerAMD =
            (PFN_vkCmdWriteBufferMarkerAMD)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteBufferMarkerAMD");
        if (!fpCmdWriteBufferMarkerAMD) {
            printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
        } else {
            m_commandBuffer->reset();
            m_commandBuffer->begin();
            fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
            m_commandBuffer->end();

            m_commandBuffer->reset();
            m_commandBuffer->begin();
            vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
            fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
            m_errorMonitor->VerifyFound();
            m_commandBuffer->end();
        }
    } else {
        printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
    }
}

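// Synchronization2 variant of the buffer copy hazards above: the same collisions are provoked, but barriers are
// recorded through vkCmdPipelineBarrier2KHR with VkDependencyInfoKHR and the VK_PIPELINE_STAGE_2_* / VK_ACCESS_2_* flags.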
TEST_F(VkSyncValTest, Sync2BufferCopyHazards) {
    SetTargetApiVersion(VK_API_VERSION_1_2);
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
    } else {
        GTEST_SKIP() << "Synchronization2 not supported";
    }

    if (!CheckSynchronization2SupportAndInitState(this)) {
        GTEST_SKIP() << "Synchronization2 not supported";
    }
    auto fpCmdPipelineBarrier2KHR =
        (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");

    VkBufferObj buffer_a;
    VkBufferObj buffer_b;
    VkBufferObj buffer_c;
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);

    VkBufferCopy region = {0, 0, 256};
    VkBufferCopy front2front = {0, 0, 128};
    VkBufferCopy front2back = {0, 128, 128};
    VkBufferCopy back2back = {128, 128, 128};

    auto cb = m_commandBuffer->handle();
    m_commandBuffer->begin();

    vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto buffer_barrier = lvl_init_struct<VkBufferMemoryBarrier2KHR>();
        buffer_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        buffer_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        buffer_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
        buffer_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        buffer_barrier.buffer = buffer_a.handle();
        buffer_barrier.offset = 0;
        buffer_barrier.size = 256;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.bufferMemoryBarrierCount = 1;
        dep_info.pBufferMemoryBarriers = &buffer_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
    }

    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
        mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.memoryBarrierCount = 1;
        dep_info.pMemoryBarriers = &mem_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);

        vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;  // Protect C but not B
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
        m_errorMonitor->VerifyFound();

        m_commandBuffer->end();
    }
}

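// Image copy hazard detection for optimally tiled images, including per-layer tracking (layer 0 vs. layer 1) and
// per-region tracking (front vs. back halves) within a single mip level.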
TEST_F(VkSyncValTest, SyncCopyOptimalImageHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image_a.Init(image_ci);
    ASSERT_TRUE(image_a.initialized());

    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    ASSERT_TRUE(image_b.initialized());

    VkImageObj image_c(m_device);
    image_c.Init(image_ci);
    ASSERT_TRUE(image_c.initialized());

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
    VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
    VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
    VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);

    m_commandBuffer->end();

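    // Hazards introduced by an executed secondary command buffer must be reported at vkCmdExecuteCommands time, and
    // barriers recorded inside the secondary (including layout transitions) are applied to the primary's access state.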
    // Test secondary command buffers
    // Create secondary buffers to use
    VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb1 = secondary_cb1.handle();
    secondary_cb1.begin();
    vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    secondary_cb1.end();

    auto record_primary = [&]() {
        m_commandBuffer->reset();
        m_commandBuffer->begin();
        vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
        vk::CmdExecuteCommands(cb, 1, &scb1);
        m_commandBuffer->end();
    };

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    record_primary();
    m_errorMonitor->VerifyFound();

    // With a barrier...
    secondary_cb1.reset();
    secondary_cb1.begin();
    vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    secondary_cb1.end();
    record_primary();

    auto image_transition_barrier = image_barrier;
    image_transition_barrier.image = image_a.handle();
    image_transition_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_transition_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    image_transition_barrier.srcAccessMask = 0;
    image_transition_barrier.dstAccessMask = 0;

    secondary_cb1.reset();
    secondary_cb1.begin();
    // Use the wrong stage, get an error
    vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_transition_barrier);
    secondary_cb1.end();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    record_primary();
    m_errorMonitor->VerifyFound();

    // CmdResolveImage hazard testing
    VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
    vk::GetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D,
                                               VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &formProps);

    if (!(formProps.sampleCounts & VK_SAMPLE_COUNT_2_BIT)) {
        printf("%s CmdResolveImage Test requires unsupported VK_SAMPLE_COUNT_2_BIT feature. Skipped.\n", kSkipPrefix);
    } else {
        VkImageObj image_s2_a(m_device), image_s2_b(m_device);
        image_ci.samples = VK_SAMPLE_COUNT_2_BIT;
        image_s2_a.Init(image_ci);
        ASSERT_TRUE(image_s2_a.initialized());

        image_s2_b.Init(image_ci);
        ASSERT_TRUE(image_s2_b.initialized());

        VkImageResolve r_full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};

        m_commandBuffer->reset();
        m_commandBuffer->begin();
        image_s2_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
        image_s2_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
        vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_commandBuffer->end();

        m_commandBuffer->reset();
        m_commandBuffer->begin();
        vk::CmdCopyImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                         &full_region);
        vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
        vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_errorMonitor->VerifyFound();

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
        vk::CmdResolveImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_errorMonitor->VerifyFound();
        m_commandBuffer->end();
    }
}

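// Synchronization2 variant of the optimal-tiling image copy hazards, using VkImageMemoryBarrier2KHR and
// VkMemoryBarrier2KHR submitted through vkCmdPipelineBarrier2KHR.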
TEST_F(VkSyncValTest, Sync2CopyOptimalImageHazards) {
    SetTargetApiVersion(VK_API_VERSION_1_2);
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
    } else {
        GTEST_SKIP() << "Synchronization2 not supported";
    }

    if (!CheckSynchronization2SupportAndInitState(this)) {
        GTEST_SKIP() << "Synchronization2 not supported";
    }
    auto fpCmdPipelineBarrier2KHR =
        (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image_a.Init(image_ci);
    ASSERT_TRUE(image_a.initialized());

    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    ASSERT_TRUE(image_b.initialized());

    VkImageObj image_c(m_device);
    image_c.Init(image_ci);
    ASSERT_TRUE(image_c.initialized());

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
    VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
    VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
    VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto image_barrier = lvl_init_struct<VkImageMemoryBarrier2KHR>();
        image_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        image_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        image_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
        image_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        image_barrier.image = image_a.handle();
        image_barrier.subresourceRange = full_subresource_range;
        image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
        image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.imageMemoryBarrierCount = 1;
        dep_info.pImageMemoryBarriers = &image_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
    }

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
        mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.memoryBarrierCount = 1;
        dep_info.pMemoryBarriers = &mem_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

        // Use barrier to protect last reader, but not last writer...
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;  // Protects C but not B
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
        m_errorMonitor->VerifyFound();
    }

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);

    m_commandBuffer->end();
}

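// Multi-planar (YCbCr) image copy hazards with optimal tiling: accesses are tracked per plane and per array layer, so
// copies touching plane 0 and plane 1 should only collide where their regions actually overlap.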
TEST_F(VkSyncValTest, SyncCopyOptimalMultiPlanarHazards) {
    // TODO: Add code to enable sync validation
    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    // Verify format
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
                                                     VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
    if (!supported) {
        printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 2};
    VkImageSubresourceLayers layer0_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer0_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer1_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{
        VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D one_four_offset{32, 32, 0};
    VkExtent3D full_extent{128, 128, 1};    // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};      // <-- image type is 2D
    VkExtent3D one_four_extent{32, 32, 1};  // <-- image type is 2D

    VkImageCopy region_all_plane0_to_all_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
    VkImageCopy region_layer0_plane0_to_layer0_plane0 = {layer0_plane0, zero_offset, layer0_plane0, zero_offset, full_extent};
    VkImageCopy region_layer0_plane0_to_layer0_plane1 = {layer0_plane0, zero_offset, layer0_plane1, zero_offset, half_extent};
    VkImageCopy region_layer1_plane1_to_layer1_plane1_front = {layer1_plane1, zero_offset, layer1_plane1, zero_offset,
                                                               one_four_extent};
    VkImageCopy region_layer1_plane1_to_layer1_plane1_back = {layer1_plane1, one_four_offset, layer1_plane1, one_four_offset,
                                                              one_four_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_back);

    m_commandBuffer->end();
}

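// The copy hazard checks repeated for linearly tiled (VK_IMAGE_TILING_LINEAR) images.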
TEST_F(VkSyncValTest, SyncCopyLinearImageHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_front = {layers_all, zero_offset, layers_all, zero_offset, half_extent};
    VkImageCopy region_back = {layers_all, half_offset, layers_all, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_b.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_back);
}

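// Multi-planar (YCbCr) copy hazards repeated for linear tiling, using a single array layer and per-plane regions.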
TEST_F(VkSyncValTest, SyncCopyLinearMultiPlanarHazards) {
    // TODO: Add code to enable sync validation
    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
    // Verify format
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
                                                     VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
    if (!supported) {
        printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer_all_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
    VkImageSubresourceRange full_subresource_range{
        VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D one_four_offset{32, 32, 0};
    VkExtent3D full_extent{128, 128, 1};    // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};      // <-- image type is 2D
    VkExtent3D one_four_extent{32, 32, 1};  // <-- image type is 2D

    VkImageCopy region_plane0_to_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
    VkImageCopy region_plane0_to_plane1 = {layer_all_plane0, zero_offset, layer_all_plane1, zero_offset, half_extent};
    VkImageCopy region_plane1_to_plane1_front = {layer_all_plane1, zero_offset, layer_all_plane1, zero_offset, one_four_extent};
    VkImageCopy region_plane1_to_plane1_back = {layer_all_plane1, one_four_offset, layer_all_plane1, one_four_offset,
                                                one_four_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_back);

    m_commandBuffer->end();
}

977TEST_F(VkSyncValTest, SyncCopyBufferImageHazards) {
978 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
979 ASSERT_NO_FATAL_FAILURE(InitState());
980
981 VkBufferObj buffer_a, buffer_b;
982 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
983 buffer_a.init_as_src_and_dst(*m_device, 2048, mem_prop);
984 buffer_b.init_as_src_and_dst(*m_device, 2048, mem_prop);
985
986 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
987 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
988 VkImageObj image_a(m_device), image_b(m_device);
989 const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
990 image_a.Init(image_ci);
991 image_b.Init(image_ci);
992
993 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
994 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
995 VkOffset3D zero_offset{0, 0, 0};
996 VkOffset3D half_offset{16, 16, 0};
997 VkExtent3D half_extent{16, 16, 1}; // <-- image type is 2D
998
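// Region naming: buffer front/back = first/second 1024 bytes of the buffer, image_0/image_1 = array layer 0/1,
// and image front/back = the 16x16 region at offset (0,0) vs. (16,16).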
999 VkBufferImageCopy region_buffer_front_image_0_front = {0, 16, 16, layers_0, zero_offset, half_extent};
1000 VkBufferImageCopy region_buffer_front_image_1_front = {0, 16, 16, layers_1, zero_offset, half_extent};
1001 VkBufferImageCopy region_buffer_front_image_1_back = {0, 16, 16, layers_1, half_offset, half_extent};
1002 VkBufferImageCopy region_buffer_back_image_0_front = {1024, 16, 16, layers_0, zero_offset, half_extent};
1003 VkBufferImageCopy region_buffer_back_image_0_back = {1024, 16, 16, layers_0, half_offset, half_extent};
1004 VkBufferImageCopy region_buffer_back_image_1_front = {1024, 16, 16, layers_1, zero_offset, half_extent};
1005 VkBufferImageCopy region_buffer_back_image_1_back = {1024, 16, 16, layers_1, half_offset, half_extent};
1006
1007 m_commandBuffer->begin();
1008 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1009 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1010
1011 auto cb = m_commandBuffer->handle();
1012 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1013 &region_buffer_front_image_0_front);
1014
1015 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1016 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1017 &region_buffer_front_image_0_front);
1018 m_errorMonitor->VerifyFound();
1019
1020 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1021 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1022 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1023 &region_buffer_front_image_0_front);
1024 m_errorMonitor->VerifyFound();
1025
1026 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1027 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1028 &region_buffer_back_image_0_front);
1029 m_errorMonitor->VerifyFound();
1030
1031 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1032 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1033 &region_buffer_front_image_1_front);
1034 m_errorMonitor->VerifyFound();
1035
1036 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1037 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1038 &region_buffer_front_image_1_back);
1039 m_errorMonitor->VerifyFound();
1040
Jeremy Gebben170781d2020-11-19 16:21:21 -07001041 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001042
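// This barrier only covers the back half of buffer_a (offset 1024 through the end of the buffer), so only accesses
// to that half are ordered against the earlier transfer write.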
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001043 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001044 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1045 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1046 buffer_barrier.buffer = buffer_a.handle();
1047 buffer_barrier.offset = 1024;
paul-lunargb01fd292022-08-24 16:59:08 +02001048 buffer_barrier.size = VK_WHOLE_SIZE;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001049 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1050 nullptr);
1051
Jeremy Gebben170781d2020-11-19 16:21:21 -07001052 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1053 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001054
1055 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1056 nullptr);
1057
Jeremy Gebben170781d2020-11-19 16:21:21 -07001058 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001059
1060 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1061 &region_buffer_front_image_0_front);
1062
1063 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1064 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1065 &region_buffer_front_image_0_front);
1066 m_errorMonitor->VerifyFound();
1067
1068 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1069 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1070 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1071 &region_buffer_front_image_0_front);
1072 m_errorMonitor->VerifyFound();
1073
1074 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1075 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1076 &region_buffer_back_image_0_front);
1077 m_errorMonitor->VerifyFound();
1078
1079 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1080 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1081 &region_buffer_front_image_1_front);
1082 m_errorMonitor->VerifyFound();
1083
1084 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1085 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1086 &region_buffer_front_image_1_back);
1087 m_errorMonitor->VerifyFound();
1088
Jeremy Gebben170781d2020-11-19 16:21:21 -07001089 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001090
1091 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1092 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1093 buffer_barrier.buffer = buffer_b.handle();
1094 buffer_barrier.offset = 1024;
paul-lunargb01fd292022-08-24 16:59:08 +02001095 buffer_barrier.size = VK_WHOLE_SIZE;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001096 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1097 nullptr);
1098
Jeremy Gebben170781d2020-11-19 16:21:21 -07001099 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1100 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001101
1102 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1103 nullptr);
1104
Jeremy Gebben170781d2020-11-19 16:21:21 -07001105 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001106
1107 m_commandBuffer->end();
1108}
1109
1110TEST_F(VkSyncValTest, SyncBlitImageHazards) {
1111 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1112 ASSERT_NO_FATAL_FAILURE(InitState());
1113
1114 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1115 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1116 VkImageObj image_a(m_device), image_b(m_device);
1117 const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
1118 image_a.Init(image_ci);
1119 image_b.Init(image_ci);
1120
1121 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1122 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
1123 VkOffset3D zero_offset{0, 0, 0};
1124 VkOffset3D half_0_offset{16, 16, 0};
1125 VkOffset3D half_1_offset{16, 16, 1};
1126 VkOffset3D full_offset{32, 32, 1};
1127 VkImageBlit region_0_front_1_front = {layers_0, {zero_offset, half_1_offset}, layers_1, {zero_offset, half_1_offset}};
1128 VkImageBlit region_1_front_0_front = {layers_1, {zero_offset, half_1_offset}, layers_0, {zero_offset, half_1_offset}};
1129 VkImageBlit region_1_back_0_back = {layers_1, {half_0_offset, full_offset}, layers_0, {half_0_offset, full_offset}};
1130
1131 m_commandBuffer->begin();
1132 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1133 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1134
1135 auto cb = m_commandBuffer->handle();
1136
1137 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1138 &region_0_front_1_front, VK_FILTER_NEAREST);
1139
1140 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1141 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1142 &region_0_front_1_front, VK_FILTER_NEAREST);
1143 m_errorMonitor->VerifyFound();
1144
1145 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1146 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1147 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1148 &region_1_front_0_front, VK_FILTER_NEAREST);
1149 m_errorMonitor->VerifyFound();
1150
Jeremy Gebben170781d2020-11-19 16:21:21 -07001151 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1152 &region_1_back_0_back, VK_FILTER_NEAREST);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001153
1154 m_commandBuffer->end();
1155}
1156
1157TEST_F(VkSyncValTest, SyncRenderPassBeginTransitionHazard) {
1158 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1159 ASSERT_NO_FATAL_FAILURE(InitState());
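// Add an explicit external dependency on the color-attachment-output stage so that barriers targeting that stage
// can chain with the render pass's initial layout transition and load op.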
John Zulaufbb373682021-10-05 17:21:40 -06001160 const VkSubpassDependency external_subpass_dependency = {VK_SUBPASS_EXTERNAL,
1161 0,
1162 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1163 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1164 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1165 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1166 VK_DEPENDENCY_BY_REGION_BIT};
1167 m_additionalSubpassDependencies.push_back(external_subpass_dependency);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001168 ASSERT_NO_FATAL_FAILURE(InitRenderTarget(2));
1169
1170 // Render Target Information
1171 auto width = static_cast<uint32_t>(m_width);
1172 auto height = static_cast<uint32_t>(m_height);
1173 auto *rt_0 = m_renderTargets[0].get();
1174 auto *rt_1 = m_renderTargets[1].get();
1175
1176 // Other buffers with which to interact
1177 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1178 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1179 VkImageObj image_a(m_device), image_b(m_device);
1180 const auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1181 image_a.Init(image_ci);
1182 image_b.Init(image_ci);
1183
1184 VkOffset3D zero_offset{0, 0, 0};
1185 VkExtent3D full_extent{width, height, 1}; // <-- image type is 2D
1186 VkImageSubresourceLayers layer_color{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1187 VkImageCopy region_to_copy = {layer_color, zero_offset, layer_color, zero_offset, full_extent};
1188
1189 auto cb = m_commandBuffer->handle();
1190
Jeremy Gebben170781d2020-11-19 16:21:21 -07001191 m_commandBuffer->begin();
1192 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1193 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1194 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1195 rt_1->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1196
1197 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1198 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, rt_0->handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001199
1200 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1201 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // This fails, so the driver call is skipped and no EndRenderPass is needed
1202 m_errorMonitor->VerifyFound();
1203
Jeremy Gebben170781d2020-11-19 16:21:21 -07001204 // Use the barrier to clean up the WAW, and try again (and show that validation accounts for the barrier effect too).
1205 VkImageSubresourceRange rt_full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001206 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001207 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
John Zulaufbb373682021-10-05 17:21:40 -06001208 image_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001209 image_barrier.image = rt_0->handle();
1210 image_barrier.subresourceRange = rt_full_subresource_range;
1211 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
1212 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
John Zulaufbb373682021-10-05 17:21:40 -06001213 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0,
1214 nullptr, 1, &image_barrier);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001215 vk::CmdCopyImage(cb, rt_1->handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001216
1217 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1218 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // This fails, so the driver call is skipped and no EndRenderPass is needed
1219 m_errorMonitor->VerifyFound();
1220
Jeremy Gebben170781d2020-11-19 16:21:21 -07001221 // A global execution barrier that the implicit external dependency can chain with should work...
1222 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 0,
1223 nullptr);
1224
1225 // With the barrier above, the layout transition has a chained execution sync operation, and the default
1226 // implicit VkSubpassDependency makes the load-op clear safe with respect to the layout transition...
1227 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1228 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001229}
1230
1231TEST_F(VkSyncValTest, SyncCmdDispatchDrawHazards) {
1232 // TODO: Add code to enable sync validation
1233 SetTargetApiVersion(VK_API_VERSION_1_2);
1234
1235 // Enable VK_KHR_draw_indirect_count for KHR variants
1236 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001237 VkPhysicalDeviceVulkan12Features features12 = LvlInitStruct<VkPhysicalDeviceVulkan12Features>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001238 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
1239 m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1240 if (DeviceValidationVersion() >= VK_API_VERSION_1_2) {
1241 features12.drawIndirectCount = VK_TRUE;
1242 }
1243 }
1244 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features12, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1245 bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1246 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
1247
1248 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
1249 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1250 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1251 VkImageObj image_c_a(m_device), image_c_b(m_device);
1252 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
1253 image_c_a.Init(image_c_ci);
1254 image_c_b.Init(image_c_ci);
1255
1256 VkImageView imageview_c = image_c_a.targetView(format);
1257 VkImageUsageFlags image_usage_storage =
1258 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1259 VkImageObj image_s_a(m_device), image_s_b(m_device);
1260 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
1261 image_s_a.Init(image_s_ci);
1262 image_s_b.Init(image_s_ci);
1263 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1264 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1265
1266 VkImageView imageview_s = image_s_a.targetView(format);
1267
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001268 vk_testing::Sampler sampler_s, sampler_c;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001269 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001270 sampler_s.init(*m_device, sampler_ci);
1271 sampler_c.init(*m_device, sampler_ci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001272
1273 VkBufferObj buffer_a, buffer_b;
1274 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1275 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
1276 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1277 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
1278 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
1279
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001280 vk_testing::BufferView bufferview;
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001281 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001282 bvci.buffer = buffer_a.handle();
1283 bvci.format = VK_FORMAT_R32_SFLOAT;
1284 bvci.offset = 0;
1285 bvci.range = VK_WHOLE_SIZE;
1286
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001287 bufferview.init(*m_device, bvci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001288
1289 OneOffDescriptorSet descriptor_set(m_device,
1290 {
1291 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1292 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
1293 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
1294 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1295 });
1296
sfricke-samsung36428462021-02-10 01:23:34 -08001297 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001298 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
Jeremy Gebben170781d2020-11-19 16:21:21 -07001299 VK_IMAGE_LAYOUT_GENERAL);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001300 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_IMAGE_LAYOUT_GENERAL);
1301 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001302 descriptor_set.UpdateDescriptorSets();
1303
1304 // Dispatch
sjfricke394227a2022-06-20 16:47:38 +09001305 const char *csSource = R"glsl(
sfricke-samsung1c0b96a2021-07-08 22:24:09 -07001306 #version 450
1307 layout(set=0, binding=0) uniform foo { float x; } ub0;
1308 layout(set=0, binding=1) uniform sampler2D cis1;
1309 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
1310 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
1311 void main(){
1312 vec4 vColor4;
1313 vColor4.x = ub0.x;
1314 vColor4 = texture(cis1, vec2(0));
1315 vColor4 = imageLoad(si2, ivec2(0));
1316 vColor4 = imageLoad(stb3, 0);
1317 }
1318 )glsl";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001319
John Zulaufbe8562b2020-12-15 14:21:01 -07001320 VkEventObj event;
1321 event.init(*m_device, VkEventObj::create_info(0));
1322 VkEvent event_handle = event.handle();
1323
Jeremy Gebben170781d2020-11-19 16:21:21 -07001324 CreateComputePipelineHelper pipe(*this);
1325 pipe.InitInfo();
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001326 pipe.cs_.reset(new VkShaderObj(this, csSource, VK_SHADER_STAGE_COMPUTE_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001327 pipe.InitState();
1328 pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1329 pipe.CreateComputePipeline();
1330
1331 m_commandBuffer->begin();
1332
1333 VkBufferCopy buffer_region = {0, 0, 2048};
1334 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1335
1336 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1337 VkOffset3D zero_offset{0, 0, 0};
1338 VkExtent3D full_extent{16, 16, 1};
1339 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
1340 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1341 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1342 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
1343 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1344
1345 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1346 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1347 &descriptor_set.set_, 0, nullptr);
1348
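// The dispatch reads all four descriptors (uniform buffer, combined image sampler, storage image, storage texel
// buffer), and each backing resource was just written by the copies above, so four read-after-write hazards are expected.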
1349 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1350 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1351 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1352 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1353 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1354 m_errorMonitor->VerifyFound();
1355
1356 m_commandBuffer->end();
1357 m_commandBuffer->reset();
1358 m_commandBuffer->begin();
1359
1360 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1361 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1362 &descriptor_set.set_, 0, nullptr);
1363 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1364
1365 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1366 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1367 m_errorMonitor->VerifyFound();
1368
1369 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1370 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1371 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1372 m_errorMonitor->VerifyFound();
1373
1374 m_commandBuffer->end();
1375 m_commandBuffer->reset();
1376
1377 // DispatchIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001378 VkBufferObj buffer_dispatchIndirect, buffer_dispatchIndirect2;
1379 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1380 buffer_dispatchIndirect.init(
1381 *m_device, buffer_dispatchIndirect.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1382 buffer_dispatchIndirect2.init(
1383 *m_device, buffer_dispatchIndirect2.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1384 m_commandBuffer->begin();
1385 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1386 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1387 &descriptor_set.set_, 0, nullptr);
1388 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1389 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001390
1391 m_commandBuffer->reset();
1392 m_commandBuffer->begin();
1393
1394 buffer_region = {0, 0, sizeof(VkDispatchIndirectCommand)};
1395 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_dispatchIndirect2.handle(), buffer_dispatchIndirect.handle(), 1,
1396 &buffer_region);
1397 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1398 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1399 &descriptor_set.set_, 0, nullptr);
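// buffer_dispatchIndirect was just written by the copy above; the indirect dispatch reads its parameters with no
// intervening barrier, so a read-after-write hazard is expected.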
1400 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1401 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1402 m_errorMonitor->VerifyFound();
1403 m_commandBuffer->end();
1404
1405 // Draw
Jeremy Gebben170781d2020-11-19 16:21:21 -07001406 const float vbo_data[3] = {1.f, 0.f, 1.f};
1407 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
1408 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
1409 VkBufferObj vbo, vbo2;
1410 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1411 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1412 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1413
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001414 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
1415 VkShaderObj fs(this, csSource, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001416
1417 CreatePipelineHelper g_pipe(*this);
1418 g_pipe.InitInfo();
1419 g_pipe.InitState();
1420 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
1421 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
1422 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
1423 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
1424 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
1425 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1426 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
1427
1428 m_commandBuffer->reset();
1429 m_commandBuffer->begin();
1430 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1431 VkDeviceSize offset = 0;
1432 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1433
1434 VkViewport viewport = {0, 0, 16, 16, 0, 1};
1435 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1436 VkRect2D scissor = {{0, 0}, {16, 16}};
1437 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1438
1439 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1440 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1441 &descriptor_set.set_, 0, nullptr);
1442 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1443 m_commandBuffer->EndRenderPass();
1444 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001445
1446 m_commandBuffer->reset();
1447 m_commandBuffer->begin();
1448
1449 buffer_region = {0, 0, sizeof(vbo_data)};
1450 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1451
1452 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1453 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1454 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1455 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1456 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1457 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1458 &descriptor_set.set_, 0, nullptr);
1459
1460 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1461 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1462 m_errorMonitor->VerifyFound();
1463
1464 m_commandBuffer->EndRenderPass();
1465 m_commandBuffer->end();
1466
John Zulaufbe8562b2020-12-15 14:21:01 -07001467 // Repeat the draw test with a WaitEvent to protect it.
John Zulaufbe8562b2020-12-15 14:21:01 -07001468 m_commandBuffer->reset();
1469 m_commandBuffer->begin();
1470
1471 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1472
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001473 auto vbo_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulaufbe8562b2020-12-15 14:21:01 -07001474 vbo_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1475 vbo_barrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
1476 vbo_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1477 vbo_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1478 vbo_barrier.buffer = vbo.handle();
1479 vbo_barrier.offset = buffer_region.dstOffset;
1480 vbo_barrier.size = buffer_region.size;
1481
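// Set the event after the copy, then wait on it with the vbo barrier before drawing; the execution dependency plus
// the buffer memory barrier makes the vertex-attribute read safe against the transfer write.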
1482 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
1483
1484 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1485 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1486 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1487 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1488 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1489 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1490 &descriptor_set.set_, 0, nullptr);
1491
1492 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, nullptr, 1,
1493 &vbo_barrier, 0, nullptr);
1494 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1495
1496 m_commandBuffer->EndRenderPass();
1497 m_commandBuffer->end();
John Zulaufbe8562b2020-12-15 14:21:01 -07001498
Jeremy Gebben170781d2020-11-19 16:21:21 -07001499 // DrawIndexed
Jeremy Gebben170781d2020-11-19 16:21:21 -07001500 const float ibo_data[3] = {0.f, 0.f, 0.f};
1501 VkBufferObj ibo, ibo2;
1502 buffer_usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1503 ibo.init(*m_device, ibo.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1504 ibo2.init(*m_device, ibo2.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1505
1506 m_commandBuffer->reset();
1507 m_commandBuffer->begin();
1508 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1509 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1510 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1511 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1512 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1513
1514 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1515 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1516 &descriptor_set.set_, 0, nullptr);
1517 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1518 m_commandBuffer->EndRenderPass();
1519 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001520
1521 m_commandBuffer->reset();
1522 m_commandBuffer->begin();
1523
1524 buffer_region = {0, 0, sizeof(ibo_data)};
1525 vk::CmdCopyBuffer(m_commandBuffer->handle(), ibo2.handle(), ibo.handle(), 1, &buffer_region);
1526
1527 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1528 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1529 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1530 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1531 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1532 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1533 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1534 &descriptor_set.set_, 0, nullptr);
1535
1536 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1537 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1538 m_errorMonitor->VerifyFound();
1539
1540 m_commandBuffer->EndRenderPass();
1541 m_commandBuffer->end();
1542
1543 // DrawIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001544 VkBufferObj buffer_drawIndirect, buffer_drawIndirect2;
1545 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1546 buffer_drawIndirect.init(*m_device, buffer_drawIndirect.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1547 mem_prop);
1548 buffer_drawIndirect2.init(*m_device, buffer_drawIndirect2.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1549 mem_prop);
1550
1551 m_commandBuffer->reset();
1552 m_commandBuffer->begin();
1553 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1554 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1555 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1556 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1557
1558 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1559 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1560 &descriptor_set.set_, 0, nullptr);
1561 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1562 m_commandBuffer->EndRenderPass();
1563 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001564
1565 m_commandBuffer->reset();
1566 m_commandBuffer->begin();
1567
1568 buffer_region = {0, 0, sizeof(VkDrawIndirectCommand)};
1569 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndirect2.handle(), buffer_drawIndirect.handle(), 1, &buffer_region);
1570
1571 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1572 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1573 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1574 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1575 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1576 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1577 &descriptor_set.set_, 0, nullptr);
1578
1579 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1580 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1581 m_errorMonitor->VerifyFound();
1582
1583 m_commandBuffer->EndRenderPass();
1584 m_commandBuffer->end();
1585
1586 // DrawIndexedIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001587 VkBufferObj buffer_drawIndexedIndirect, buffer_drawIndexedIndirect2;
1588 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1589 buffer_drawIndexedIndirect.init(
1590 *m_device, buffer_drawIndexedIndirect.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1591 buffer_drawIndexedIndirect2.init(
1592 *m_device, buffer_drawIndexedIndirect2.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1593
1594 m_commandBuffer->reset();
1595 m_commandBuffer->begin();
1596 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1597 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1598 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1599 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1600 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1601
1602 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1603 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1604 &descriptor_set.set_, 0, nullptr);
1605 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1606 m_commandBuffer->EndRenderPass();
1607 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001608
1609 m_commandBuffer->reset();
1610 m_commandBuffer->begin();
1611
1612 buffer_region = {0, 0, sizeof(VkDrawIndexedIndirectCommand)};
1613 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndexedIndirect2.handle(), buffer_drawIndexedIndirect.handle(), 1,
1614 &buffer_region);
1615
1616 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1617 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1618 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1619 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1620 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1621 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1622 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1623 &descriptor_set.set_, 0, nullptr);
1624
1625 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1626 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, 1,
1627 sizeof(VkDrawIndexedIndirectCommand));
1628 m_errorMonitor->VerifyFound();
1629
1630 m_commandBuffer->EndRenderPass();
1631 m_commandBuffer->end();
1632
1633 if (has_khr_indirect) {
1634 // DrawIndirectCount
1635 auto fpCmdDrawIndirectCountKHR =
1636 (PFN_vkCmdDrawIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
1637 if (!fpCmdDrawIndirectCountKHR) {
1638 printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1639 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001640 VkBufferObj buffer_count, buffer_count2;
1641 buffer_usage =
1642 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1643 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1644 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1645
1646 m_commandBuffer->reset();
1647 m_commandBuffer->begin();
1648 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1649 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1650 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1651 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1652
1653 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1654 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1655 0, 1, &descriptor_set.set_, 0, nullptr);
1656 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1657 sizeof(VkDrawIndirectCommand));
1658 m_commandBuffer->EndRenderPass();
1659 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001660
1661 m_commandBuffer->reset();
1662 m_commandBuffer->begin();
1663
1664 buffer_region = {0, 0, sizeof(uint32_t)};
1665 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1666
1667 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1668 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1669 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1670 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1671 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1672 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1673 0, 1, &descriptor_set.set_, 0, nullptr);
1674
1675 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1676 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1677 sizeof(VkDrawIndirectCommand));
1678 m_errorMonitor->VerifyFound();
1679
1680 m_commandBuffer->EndRenderPass();
1681 m_commandBuffer->end();
1682 }
1683
1684 // DrawIndexedIndirectCount
1685 auto fpCmdDrawIndexIndirectCountKHR =
1686 (PFN_vkCmdDrawIndexedIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
1687 if (!fpCmdDrawIndexIndirectCountKHR) {
1688 printf("%s Test requires unsupported vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1689 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001690 VkBufferObj buffer_count, buffer_count2;
1691 buffer_usage =
1692 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1693 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1694 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1695
1696 m_commandBuffer->reset();
1697 m_commandBuffer->begin();
1698 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1699 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1700 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1701 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1702 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1703
1704 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1705 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1706 0, 1, &descriptor_set.set_, 0, nullptr);
1707 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1708 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1709 m_commandBuffer->EndRenderPass();
1710 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001711
1712 m_commandBuffer->reset();
1713 m_commandBuffer->begin();
1714
1715 buffer_region = {0, 0, sizeof(uint32_t)};
1716 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1717
1718 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1719 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1720 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1721 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1722 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1723 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1724 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1725 0, 1, &descriptor_set.set_, 0, nullptr);
1726
1727 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1728 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1729 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1730 m_errorMonitor->VerifyFound();
1731
1732 m_commandBuffer->EndRenderPass();
1733 m_commandBuffer->end();
1734 }
1735 } else {
1736 printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR & vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n",
1737 kSkipPrefix);
1738 }
1739}
1740
1741TEST_F(VkSyncValTest, SyncCmdClear) {
1742 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1743 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1744 // CmdClearColorImage
Jeremy Gebben170781d2020-11-19 16:21:21 -07001745 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1746 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1747 VkImageObj image_a(m_device), image_b(m_device);
1748 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1749 image_a.Init(image_ci);
1750 image_b.Init(image_ci);
1751
1752 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1753 VkOffset3D zero_offset{0, 0, 0};
1754 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
1755 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
1756
1757 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
1758
1759 m_commandBuffer->begin();
1760
1761 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1762 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1763
1764 auto cb = m_commandBuffer->handle();
1765 VkClearColorValue ccv = {};
1766 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1767 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001768
1769 m_commandBuffer->reset();
1770 m_commandBuffer->begin();
1771 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
1772
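// The copy above read image_a and wrote image_b, so clearing image_a is a write-after-read hazard and clearing
// image_b is a write-after-write hazard.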
1773 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1774 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1775 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1776 vk::CmdClearColorImage(m_commandBuffer->handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1777 m_errorMonitor->VerifyFound();
1778
1779 m_commandBuffer->end();
1780
1781 // CmdClearDepthStencilImage
1782 format = FindSupportedDepthStencilFormat(gpu());
1783 if (!format) {
1784 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
1785 return;
1786 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07001787 VkImageObj image_ds_a(m_device), image_ds_b(m_device);
1788 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1789 image_ds_a.Init(image_ci);
1790 image_ds_b.Init(image_ci);
1791
1792 const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
1793 image_ds_a.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1794 image_ds_b.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1795
1796 m_commandBuffer->begin();
1797 const VkClearDepthStencilValue clear_value = {};
1798 VkImageSubresourceRange ds_range = {ds_aspect, 0, 1, 0, 1};
1799
1800 vk::CmdClearDepthStencilImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &ds_range);
1801 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001802
1803 VkImageSubresourceLayers ds_layers_all{ds_aspect, 0, 0, 1};
1804 VkImageCopy ds_full_region = {ds_layers_all, zero_offset, ds_layers_all, zero_offset, full_extent};
1805
1806 m_commandBuffer->reset();
1807 m_commandBuffer->begin();
1808 vk::CmdCopyImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1809 &ds_full_region);
1810
1811 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1812 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1813 &ds_range);
1814 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1815 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1816 &ds_range);
1817 m_errorMonitor->VerifyFound();
1818
1819 m_commandBuffer->end();
1820}
1821
1822TEST_F(VkSyncValTest, SyncCmdQuery) {
1823 // CmdCopyQueryPoolResults
Jeremy Gebben170781d2020-11-19 16:21:21 -07001824 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1825 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1826 if (IsPlatform(kNexusPlayer)) {
1827 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
1828 return;
1829 }
1830 if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
1831 printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
1832 return;
1833 }
1834 uint32_t queue_count;
1835 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
Jeremy Gebbend2573fc2021-05-12 17:17:38 -06001836 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
1837 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001838 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits == 0) {
1839 printf("%s Device graphic queue has timestampValidBits of 0, skipping.\n", kSkipPrefix);
1840 return;
1841 }
1842
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001843 vk_testing::QueryPool query_pool;
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001844 VkQueryPoolCreateInfo query_pool_create_info = LvlInitStruct<VkQueryPoolCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001845 query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
1846 query_pool_create_info.queryCount = 1;
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001847 query_pool.init(*m_device, query_pool_create_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001848
1849 VkBufferObj buffer_a, buffer_b;
1850 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1851 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
1852 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
1853
1854 VkBufferCopy region = {0, 0, 256};
1855
1856 auto cb = m_commandBuffer->handle();
1857 m_commandBuffer->begin();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001858 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1859 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
1860 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001861 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001862
1863 m_commandBuffer->reset();
1864 m_commandBuffer->begin();
1865 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001866 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1867 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
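// The copy above read buffer_a and wrote buffer_b, so copying query results into buffer_a is a write-after-read
// hazard and into buffer_b is a write-after-write hazard.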
Jeremy Gebben170781d2020-11-19 16:21:21 -07001868 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001869 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001870 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001871 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_b.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001872 m_commandBuffer->end();
1873 m_errorMonitor->VerifyFound();
1874
1875 // TODO: Track VkQueryPool
1876 // TODO: CmdWriteTimestamp
Jeremy Gebben170781d2020-11-19 16:21:21 -07001877}
1878
1879TEST_F(VkSyncValTest, SyncCmdDrawDepthStencil) {
1880 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1881 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001882
paul-lunarg1fa28862022-08-26 19:18:37 +02001883 auto format_ds = FindSupportedDepthStencilFormat(gpu());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001884 if (!format_ds) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001885 GTEST_SKIP() << "No Depth + Stencil format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001886 }
paul-lunarg1fa28862022-08-26 19:18:37 +02001887
1888 // Vulkan doesn't support copying between different depth/stencil formats, so the formats have to match.
1889 auto format_dp = format_ds;
1890 auto format_st = format_ds;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001891
1892 VkDepthStencilObj image_ds(m_device), image_dp(m_device), image_st(m_device);
paul-lunargb01fd292022-08-24 16:59:08 +02001893 image_ds.Init(m_device, 16, 16, format_ds, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
1894 image_dp.Init(m_device, 16, 16, format_dp, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
1895 image_st.Init(m_device, 16, 16, format_st, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001896
1897 VkRenderpassObj rp_ds(m_device, format_ds, true), rp_dp(m_device, format_dp, true), rp_st(m_device, format_st, true);
1898
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001899 vk_testing::Framebuffer fb_ds, fb_dp, fb_st;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001900 VkFramebufferCreateInfo fbci = {
1901 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_ds.handle(), 1, image_ds.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001902 fb_ds.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001903 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_dp.handle(), 1, image_dp.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001904 fb_dp.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001905 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_st.handle(), 1, image_st.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001906 fb_st.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001907
1908 VkStencilOpState stencil = {};
1909 stencil.failOp = VK_STENCIL_OP_KEEP;
1910 stencil.passOp = VK_STENCIL_OP_KEEP;
1911 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
1912 stencil.compareOp = VK_COMPARE_OP_NEVER;
1913
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001914 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001915 ds_ci.depthTestEnable = VK_TRUE;
1916 ds_ci.depthWriteEnable = VK_TRUE;
1917 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
1918 ds_ci.stencilTestEnable = VK_TRUE;
1919 ds_ci.front = stencil;
1920 ds_ci.back = stencil;
1921
1922 CreatePipelineHelper g_pipe_ds(*this), g_pipe_dp(*this), g_pipe_st(*this);
1923 g_pipe_ds.InitInfo();
1924 g_pipe_ds.gp_ci_.renderPass = rp_ds.handle();
1925 g_pipe_ds.gp_ci_.pDepthStencilState = &ds_ci;
1926 g_pipe_ds.InitState();
1927 ASSERT_VK_SUCCESS(g_pipe_ds.CreateGraphicsPipeline());
1928 g_pipe_dp.InitInfo();
1929 g_pipe_dp.gp_ci_.renderPass = rp_dp.handle();
1930 ds_ci.stencilTestEnable = VK_FALSE;
1931 g_pipe_dp.gp_ci_.pDepthStencilState = &ds_ci;
1932 g_pipe_dp.InitState();
1933 ASSERT_VK_SUCCESS(g_pipe_dp.CreateGraphicsPipeline());
1934 g_pipe_st.InitInfo();
1935 g_pipe_st.gp_ci_.renderPass = rp_st.handle();
1936 ds_ci.depthTestEnable = VK_FALSE;
1937 ds_ci.stencilTestEnable = VK_TRUE;
1938 g_pipe_st.gp_ci_.pDepthStencilState = &ds_ci;
1939 g_pipe_st.InitState();
1940 ASSERT_VK_SUCCESS(g_pipe_st.CreateGraphicsPipeline());
1941
1942 m_commandBuffer->begin();
1943 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
1944 m_renderPassBeginInfo.pClearValues = nullptr;
1945 m_renderPassBeginInfo.clearValueCount = 0;
1946
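    // Record one draw into each depth/stencil attachment; no hazards are expected while recording this command buffer.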
1947 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001948 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001949 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1950 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_ds.pipeline_);
1951 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1952 m_commandBuffer->EndRenderPass();
1953
1954 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001955 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001956 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1957 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_dp.pipeline_);
1958 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1959 m_commandBuffer->EndRenderPass();
1960
1961 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001962 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001963 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1964 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_st.pipeline_);
1965 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1966 m_commandBuffer->EndRenderPass();
1967
1968 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001969
1970 m_commandBuffer->reset();
1971 m_commandBuffer->begin();
1972
paul-lunargb01fd292022-08-24 16:59:08 +02001973 image_ds.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, VK_IMAGE_LAYOUT_GENERAL);
paul-lunarg1fa28862022-08-26 19:18:37 +02001974 image_dp.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, VK_IMAGE_LAYOUT_GENERAL);
1975 image_st.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT, VK_IMAGE_LAYOUT_GENERAL);
paul-lunargb01fd292022-08-24 16:59:08 +02001976
Jeremy Gebben170781d2020-11-19 16:21:21 -07001977 VkImageCopy copyRegion;
paul-lunarg1fa28862022-08-26 19:18:37 +02001978 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001979 copyRegion.srcSubresource.mipLevel = 0;
1980 copyRegion.srcSubresource.baseArrayLayer = 0;
1981 copyRegion.srcSubresource.layerCount = 1;
1982 copyRegion.srcOffset = {0, 0, 0};
paul-lunarg1fa28862022-08-26 19:18:37 +02001983 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001984 copyRegion.dstSubresource.mipLevel = 0;
1985 copyRegion.dstSubresource.baseArrayLayer = 0;
1986 copyRegion.dstSubresource.layerCount = 1;
1987 copyRegion.dstOffset = {0, 0, 0};
1988 copyRegion.extent = {16, 16, 1};
1989
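    // The copies below read image_ds and write image_dp/image_st; the BeginRenderPass calls that follow are expected to
    // hazard against them (WRITE_AFTER_READ on the rp_ds attachment, WRITE_AFTER_WRITE on the rp_dp and rp_st attachments).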
paul-lunargb01fd292022-08-24 16:59:08 +02001990 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_GENERAL, image_dp.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1991 &copyRegion);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001992
paul-lunarg1fa28862022-08-26 19:18:37 +02001993 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
1994 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
paul-lunargb01fd292022-08-24 16:59:08 +02001995 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_GENERAL, image_st.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1996 &copyRegion);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001997 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001998 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001999 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
2000 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2001 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002002
2003 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002004 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002005 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2006 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2007 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002008
2009 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002010 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002011 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2012 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2013 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002014}
2015
John Zulaufd57a36b2021-08-16 10:34:44 -06002016
Jeremy Gebben170781d2020-11-19 16:21:21 -07002017TEST_F(VkSyncValTest, RenderPassLoadHazardVsInitialLayout) {
2018 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
John Zulaufd57a36b2021-08-16 10:34:44 -06002019 bool do_none_load_op_test = false;
2020 if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME)) {
2021 m_device_extension_names.push_back(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME);
2022 do_none_load_op_test = true;
2023 }
2024
Jeremy Gebben170781d2020-11-19 16:21:21 -07002025 ASSERT_NO_FATAL_FAILURE(InitState());
2026 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2027
2028 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2029 VkImageUsageFlags usage_input = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2030 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2031 VkImageObj image_color(m_device), image_input(m_device);
2032 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2033 image_color.Init(image_ci);
2034 image_ci.usage = usage_input;
2035 image_input.Init(image_ci);
2036 VkImageView attachments[] = {image_color.targetView(format), image_input.targetView(format)};
2037
John Zulaufd57a36b2021-08-16 10:34:44 -06002038 VkAttachmentDescription attachmentDescriptions[] = {
Jeremy Gebben170781d2020-11-19 16:21:21 -07002039 // Result attachment
2040 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2041 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2042         VK_IMAGE_LAYOUT_UNDEFINED, // Deliberately wrong: UNDEFINED here is what provokes the SYNC-HAZARD errors expected at BeginRenderPass.
2043                                    // It should be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
2044 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
2045 // Input attachment
2046 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD,
2047 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2048 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL}};
2049
2050 const VkAttachmentReference resultAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2051 const VkAttachmentReference inputAttachmentRef = {1u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2052
2053 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2054 VK_PIPELINE_BIND_POINT_GRAPHICS,
2055 1u,
2056 &inputAttachmentRef,
2057 1u,
2058 &resultAttachmentRef,
2059 0,
2060 0,
2061 0u,
2062 0};
2063
2064 const VkSubpassDependency subpassDependency = {VK_SUBPASS_EXTERNAL,
2065 0,
2066 VK_PIPELINE_STAGE_TRANSFER_BIT,
2067 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
2068 VK_ACCESS_TRANSFER_WRITE_BIT,
2069 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT,
2070 VK_DEPENDENCY_BY_REGION_BIT};
2071
2072 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2073 0,
2074 (VkRenderPassCreateFlags)0,
2075 2u,
2076 attachmentDescriptions,
2077 1u,
2078 &subpassDescription,
2079 1u,
2080 &subpassDependency};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002081 vk_testing::RenderPass rp;
2082 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002083
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002084 vk_testing::Framebuffer fb;
2085 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp.handle(), 2, attachments, 32, 32, 1};
2086 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002087
2088 image_input.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
2089
2090 m_commandBuffer->begin();
2091
2092 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002093 m_renderPassBeginInfo.renderPass = rp.handle();
2094 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002095
2096 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
2097 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2098    // Even though there are no prior accesses, the layout transition *is* an access, so the load op can be validated against the layout transition
2099 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2100 m_errorMonitor->VerifyFound();
John Zulaufd57a36b2021-08-16 10:34:44 -06002101
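    // Sub-test: with VK_ATTACHMENT_LOAD_OP_NONE_EXT / VK_ATTACHMENT_STORE_OP_NONE_EXT the attachments are not accessed
    // by the load/store ops, so the same begin/end sequence is expected to record without hazards.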
2102 vk_testing::RenderPass rp_no_load_store;
2103 if (do_none_load_op_test) {
John Zulaufd57a36b2021-08-16 10:34:44 -06002104 attachmentDescriptions[0].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2105 attachmentDescriptions[0].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
2106 attachmentDescriptions[1].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2107 attachmentDescriptions[1].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
2108 rp_no_load_store.init(*m_device, renderPassInfo);
2109 m_renderPassBeginInfo.renderPass = rp_no_load_store.handle();
2110 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2111 m_commandBuffer->EndRenderPass();
John Zulaufd57a36b2021-08-16 10:34:44 -06002112 } else {
2113 printf("%s VK_EXT_load_store_op_none not supported, skipping sub-test\n", kSkipPrefix);
2114 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07002115}
2116
2117TEST_F(VkSyncValTest, SyncRenderPassWithWrongDepthStencilInitialLayout) {
2118 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2119 ASSERT_NO_FATAL_FAILURE(InitState());
2120 if (IsPlatform(kNexusPlayer)) {
2121 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2122 return;
2123 }
2124
2125 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2126
2127 VkFormat color_format = VK_FORMAT_R8G8B8A8_UNORM;
2128 VkFormat ds_format = FindSupportedDepthStencilFormat(gpu());
2129 if (!ds_format) {
2130 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
2131 return;
2132 }
2133 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2134 VkImageUsageFlags usage_ds = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2135 VkImageObj image_color(m_device), image_color2(m_device);
2136 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, color_format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2137 image_color.Init(image_ci);
2138 image_color2.Init(image_ci);
2139 VkDepthStencilObj image_ds(m_device);
2140 image_ds.Init(m_device, 32, 32, ds_format, usage_ds);
2141
2142 const VkAttachmentDescription colorAttachmentDescription = {(VkAttachmentDescriptionFlags)0,
2143 color_format,
2144 VK_SAMPLE_COUNT_1_BIT,
2145 VK_ATTACHMENT_LOAD_OP_CLEAR,
2146 VK_ATTACHMENT_STORE_OP_STORE,
2147 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2148 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2149 VK_IMAGE_LAYOUT_UNDEFINED,
2150 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2151
2152 const VkAttachmentDescription depthStencilAttachmentDescription = {
2153 (VkAttachmentDescriptionFlags)0, ds_format, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2154 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_STORE,
2155         VK_IMAGE_LAYOUT_UNDEFINED, // Deliberately wrong: UNDEFINED here is what provokes the SYNC-HAZARD-WRITE_AFTER_WRITE expected at BeginRenderPass.
2156                                    // It should be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
2157 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2158
2159 std::vector<VkAttachmentDescription> attachmentDescriptions;
2160 attachmentDescriptions.push_back(colorAttachmentDescription);
2161 attachmentDescriptions.push_back(depthStencilAttachmentDescription);
2162
2163 const VkAttachmentReference colorAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2164
2165 const VkAttachmentReference depthStencilAttachmentRef = {1u, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2166
2167 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2168 VK_PIPELINE_BIND_POINT_GRAPHICS,
2169 0u,
2170 0,
2171 1u,
2172 &colorAttachmentRef,
2173 0,
2174 &depthStencilAttachmentRef,
2175 0u,
2176 0};
2177
2178 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2179 0,
2180 (VkRenderPassCreateFlags)0,
2181 (uint32_t)attachmentDescriptions.size(),
2182 &attachmentDescriptions[0],
2183 1u,
2184 &subpassDescription,
2185 0u,
2186 0};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002187 vk_testing::RenderPass rp;
2188 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002189
2190 VkImageView fb_attachments[] = {image_color.targetView(color_format),
2191 image_ds.targetView(ds_format, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)};
2192 const VkFramebufferCreateInfo fbci = {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002193 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, 0, 0u, rp.handle(), 2u, fb_attachments, 32, 32, 1u,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002194 };
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002195 vk_testing::Framebuffer fb;
2196 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002197 fb_attachments[0] = image_color2.targetView(color_format);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002198 vk_testing::Framebuffer fb1;
2199 fb1.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002200
2201 CreatePipelineHelper g_pipe(*this);
2202 g_pipe.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002203 g_pipe.gp_ci_.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002204
2205 VkStencilOpState stencil = {};
2206 stencil.failOp = VK_STENCIL_OP_KEEP;
2207 stencil.passOp = VK_STENCIL_OP_KEEP;
2208 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
2209 stencil.compareOp = VK_COMPARE_OP_NEVER;
2210
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07002211 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002212 ds_ci.depthTestEnable = VK_TRUE;
2213 ds_ci.depthWriteEnable = VK_TRUE;
2214 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
2215 ds_ci.stencilTestEnable = VK_TRUE;
2216 ds_ci.front = stencil;
2217 ds_ci.back = stencil;
2218
2219 g_pipe.gp_ci_.pDepthStencilState = &ds_ci;
2220 g_pipe.InitState();
2221 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2222
2223 m_commandBuffer->begin();
Tony-LunarG73f37032021-06-07 11:47:03 -06002224 VkClearValue clear = {};
2225 std::array<VkClearValue, 2> clear_values = { {clear, clear} };
2226 m_renderPassBeginInfo.pClearValues = clear_values.data();
2227 m_renderPassBeginInfo.clearValueCount = clear_values.size();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002228 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002229 m_renderPassBeginInfo.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002230
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002231 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002232 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2233 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2234 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2235 m_commandBuffer->EndRenderPass();
2236
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002237 m_renderPassBeginInfo.framebuffer = fb1.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002238
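    // fb1 swaps in image_color2 but shares the depth/stencil attachment with fb; its VK_IMAGE_LAYOUT_UNDEFINED initial
    // layout transition writes that attachment again with no synchronization against the first render pass.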
2239 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2240 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2241 m_errorMonitor->VerifyFound();
2242}
2243
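// Helper that assembles a two-attachment (color + input) render pass, framebuffer, and begin info with overridable
// defaults; used by the layout-transition and subpass-dependency tests below.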
John Zulauf01a49ee2022-07-13 11:37:08 -06002244struct CreateRenderPassHelper {
2245 struct SubpassDescriptionStore {
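        // Holds references to the caller's attachment-reference vectors so the pointers captured in desc stay valid.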
2246 const std::vector<VkAttachmentReference>& input_store;
2247 const std::vector<VkAttachmentReference>& color_store;
2248 VkSubpassDescription desc;
2249 SubpassDescriptionStore(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color)
2250 : input_store(input), color_store(color) {
2251 desc = {
2252 0u,
2253 VK_PIPELINE_BIND_POINT_GRAPHICS,
2254 static_cast<uint32_t>(input_store.size()),
2255 input_store.data(),
2256 static_cast<uint32_t>(color_store.size()),
2257 color_store.data(),
2258 nullptr,
2259 nullptr,
2260 0u,
2261 nullptr,
2262 };
2263 if (desc.inputAttachmentCount == 0) {
2264 desc.pInputAttachments = nullptr;
2265 }
2266 if (desc.colorAttachmentCount == 0) {
2267 desc.pColorAttachments = nullptr;
2268 }
2269 }
2270 };
2271
John Zulauf2f5947d2022-07-27 15:36:31 -06002272 VkImageUsageFlags usage_color =
2273 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
John Zulauf01a49ee2022-07-13 11:37:08 -06002274 VkImageUsageFlags usage_input =
2275 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2276 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2277 VkClearColorValue ccv = {};
2278
2279 VkDeviceObj* dev;
2280 const static uint32_t kDefaultImageSize = 64;
2281 uint32_t width = kDefaultImageSize;
2282 uint32_t height = kDefaultImageSize;
2283 std::shared_ptr<VkImageObj> image_color;
2284 std::shared_ptr<VkImageObj> image_input;
2285 VkImageView view_input = VK_NULL_HANDLE;
2286 VkImageView view_color = VK_NULL_HANDLE;
2287
2288 VkAttachmentReference color_ref;
2289 VkAttachmentReference input_ref;
2290 std::vector<VkImageView> attachments;
2291 VkAttachmentDescription fb_attach_desc;
2292 VkAttachmentDescription input_attach_desc;
2293 std::vector<VkAttachmentDescription> attachment_descs;
2294 std::vector<VkAttachmentReference> input_attachments;
2295 std::vector<VkAttachmentReference> color_attachments;
2296 std::vector<VkSubpassDependency> subpass_dep;
2297 std::vector<VkSubpassDescription> subpasses;
2298 std::vector<SubpassDescriptionStore> subpass_description_store;
2299 VkRenderPassCreateInfo render_pass_create_info;
John Zulauf2f5947d2022-07-27 15:36:31 -06002300 std::shared_ptr<vk_testing::RenderPass> render_pass;
John Zulauf01a49ee2022-07-13 11:37:08 -06002301 std::shared_ptr<vk_testing::Framebuffer> framebuffer;
2302 VkRenderPassBeginInfo render_pass_begin;
2303 std::vector<VkClearValue> clear_colors;
2304
2305 CreateRenderPassHelper(VkDeviceObj* dev_)
2306 : dev(dev_),
2307 image_color(std::make_shared<VkImageObj>(dev)),
2308 image_input(std::make_shared<VkImageObj>(dev)),
2309 color_ref(DefaultColorRef()),
2310 input_ref(DefaultInputRef()),
2311 fb_attach_desc(DefaultFbAttachDesc()),
2312 input_attach_desc(DefaultInputAttachDesc()) {}
2313
2314 CreateRenderPassHelper(const CreateRenderPassHelper& other) = default;
2315
2316 void InitImageAndView() {
2317 auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage_input, VK_IMAGE_TILING_OPTIMAL);
2318 image_input->InitNoLayout(image_ci);
2319 image_ci.usage = usage_color;
2320 image_color->InitNoLayout(image_ci);
2321
2322 view_input = image_input->targetView(format);
2323 view_color = image_color->targetView(format);
2324 attachments = {view_color, view_input};
2325 }
2326
John Zulauf2f5947d2022-07-27 15:36:31 -06002327 static VkAttachmentReference DefaultColorRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002328 return {
2329 0u,
2330 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2331 };
2332 }
2333
John Zulauf2f5947d2022-07-27 15:36:31 -06002334 static VkAttachmentReference DefaultInputRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002335 return {
2336 1u,
2337 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2338 };
2339 };
2340
John Zulauf2f5947d2022-07-27 15:36:31 -06002341 static VkAttachmentReference UnusedColorAttachmentRef() {
2342 return {
2343 VK_ATTACHMENT_UNUSED,
2344 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2345 };
2346 };
2347
John Zulauf01a49ee2022-07-13 11:37:08 -06002348 VkAttachmentDescription DefaultFbAttachDesc() {
2349 return VkAttachmentDescription{
2350 0u,
2351 format,
2352 VK_SAMPLE_COUNT_1_BIT,
2353 VK_ATTACHMENT_LOAD_OP_CLEAR,
2354 VK_ATTACHMENT_STORE_OP_STORE,
2355 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2356 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2357 VK_IMAGE_LAYOUT_UNDEFINED,
2358 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2359 };
2360 }
2361 VkAttachmentDescription DefaultInputAttachDesc() const {
2362 return VkAttachmentDescription{
2363 0u,
2364 format,
2365 VK_SAMPLE_COUNT_1_BIT,
2366 VK_ATTACHMENT_LOAD_OP_LOAD,
2367 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2368 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2369 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2370 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2371 VK_IMAGE_LAYOUT_GENERAL,
2372 };
2373 }
2374
John Zulauf2f5947d2022-07-27 15:36:31 -06002375 void InitAllAttachmentsToLayoutGeneral() {
2376 fb_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2377 fb_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2378 color_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2379 input_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2380 input_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2381 input_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2382 }
2383
2384 void SetAttachmentLayout(VkImageObj* image, const VkAttachmentDescription& attach_desc) {
2385 if (image && image->initialized() && (attach_desc.initialLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
2386 image->SetLayout(attach_desc.initialLayout);
2387 }
2388 }
2389
2390 void SetColorLayout() { SetAttachmentLayout(image_color.get(), fb_attach_desc); }
2391 void SetInputLayout() { SetAttachmentLayout(image_input.get(), input_attach_desc); }
2392
2393 void InitAttachmentLayouts() {
2394 SetColorLayout();
2395 SetInputLayout();
2396 }
2397
John Zulauf01a49ee2022-07-13 11:37:08 -06002398 void InitAttachmentArrays() {
2399 // Add attachments
2400 if (attachment_descs.empty()) {
2401 attachment_descs = {fb_attach_desc, input_attach_desc};
2402 }
2403 if (color_attachments.empty()) {
2404 color_attachments = {color_ref};
2405 }
2406 if (input_attachments.empty()) {
2407 input_attachments = {input_ref};
2408 }
2409 }
2410
John Zulauf2f5947d2022-07-27 15:36:31 -06002411    void AddSubpassDescription(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color) {
2412 subpass_description_store.emplace_back(input, color);
2413 }
2414
2415 // Capture the current input and color attachements, which can then be modified
2416 void AddSubpassDescription() { subpass_description_store.emplace_back(input_attachments, color_attachments); }
2417
John Zulauf01a49ee2022-07-13 11:37:08 -06002418    // This is the default for a single-subpass renderpass; don't call it if you want to change that
2419 void InitSubpassDescription() {
2420 if (subpass_description_store.empty()) {
John Zulauf2f5947d2022-07-27 15:36:31 -06002421 AddSubpassDescription();
John Zulauf01a49ee2022-07-13 11:37:08 -06002422 }
2423 }
2424
2425 void InitSubpasses() {
2426 if (subpasses.empty()) {
2427 subpasses.reserve(subpass_description_store.size());
2428 for (const auto& desc_store : subpass_description_store) {
2429 subpasses.emplace_back(desc_store.desc);
2430 }
2431 }
2432 }
2433
2434 void InitRenderPassInfo() {
2435 render_pass_create_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2436 nullptr,
2437 0u,
2438 static_cast<uint32_t>(attachment_descs.size()),
2439 attachment_descs.data(),
2440 static_cast<uint32_t>(subpasses.size()),
2441 subpasses.data(),
2442 static_cast<uint32_t>(subpass_dep.size()),
2443 subpass_dep.data()};
2444 }
2445
2446 void InitRenderPass() {
2447 InitAttachmentArrays();
2448 InitSubpassDescription();
2449 InitSubpasses();
2450 InitRenderPassInfo();
John Zulauf2f5947d2022-07-27 15:36:31 -06002451 render_pass = std::make_shared<vk_testing::RenderPass>();
2452 render_pass->init(*dev, render_pass_create_info);
John Zulauf01a49ee2022-07-13 11:37:08 -06002453 }
2454
2455 void InitFramebuffer() {
2456 framebuffer = std::make_shared<vk_testing::Framebuffer>();
2457 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
2458 0,
2459 0u,
John Zulauf2f5947d2022-07-27 15:36:31 -06002460 render_pass->handle(),
John Zulauf01a49ee2022-07-13 11:37:08 -06002461 static_cast<uint32_t>(attachments.size()),
2462 attachments.data(),
2463 width,
2464 height,
2465 1u};
2466 framebuffer->init(*dev, fbci);
2467 }
2468
2469 void InitState() {
2470 InitImageAndView();
2471 }
2472
2473 void InitBeginInfo() {
2474 render_pass_begin = lvl_init_struct<VkRenderPassBeginInfo>();
2475 render_pass_begin.renderArea = {{0, 0}, {width, height}};
John Zulauf2f5947d2022-07-27 15:36:31 -06002476 render_pass_begin.renderPass = render_pass->handle();
John Zulauf01a49ee2022-07-13 11:37:08 -06002477 render_pass_begin.framebuffer = framebuffer->handle();
2478
2479        // Simplistically ensure there are enough clear colors if none were provided
2480        // TODO: Should eventually be smart enough to fill in color/depth as appropriate
2481 VkClearValue fill_in;
2482 fill_in.color = ccv;
2483 for (size_t i = clear_colors.size(); i < attachments.size(); ++i) {
2484 clear_colors.push_back(fill_in);
2485 }
2486 render_pass_begin.clearValueCount = static_cast<uint32_t>(clear_colors.size());
2487 render_pass_begin.pClearValues = clear_colors.data();
2488 }
2489
John Zulauf2f5947d2022-07-27 15:36:31 -06002490 void InitPipelineHelper(CreatePipelineHelper& g_pipe) {
2491 g_pipe.InitInfo();
2492 g_pipe.ResetShaderInfo(bindStateVertShaderText, bindStateFragSubpassLoadInputText);
2493 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2494 g_pipe.gp_ci_.renderPass = render_pass->handle();
2495 g_pipe.InitState();
2496 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2497 }
2498
John Zulauf01a49ee2022-07-13 11:37:08 -06002499 void Init() {
2500 InitState();
2501 InitRenderPass();
2502 InitFramebuffer();
2503 InitBeginInfo();
2504 }
2505};
2506
2507struct SyncTestPipeline {
2508 VkLayerTest& test;
2509 VkRenderPass rp;
2510 CreatePipelineHelper g_pipe;
2511 VkShaderObj vs;
2512 VkShaderObj fs;
2513 VkSamplerCreateInfo sampler_info;
2514 vk_testing::Sampler sampler;
2515 VkImageView view_input = VK_NULL_HANDLE;
2516 SyncTestPipeline(VkLayerTest& test_, VkRenderPass rp_)
2517 : test(test_),
2518 rp(rp_),
2519 g_pipe(test),
2520 vs(&test, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT),
2521 fs(&test, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT),
2522 sampler_info(SafeSaneSamplerCreateInfo()),
2523 sampler() {}
2524 void InitState() {
2525 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
2526 sampler.init(*test.DeviceObj(), sampler_info);
2527 g_pipe.InitInfo();
2528 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2529 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2530 g_pipe.gp_ci_.renderPass = rp;
2531 g_pipe.InitState();
2532 }
2533 void Init() {
2534 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2535 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, view_input, sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2536 g_pipe.descriptor_set_->UpdateDescriptorSets();
2537 }
2538};
2539
Jeremy Gebben170781d2020-11-19 16:21:21 -07002540TEST_F(VkSyncValTest, SyncLayoutTransition) {
2541 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2542 ASSERT_NO_FATAL_FAILURE(InitState());
2543 if (IsPlatform(kNexusPlayer)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002544 GTEST_SKIP() << "This test should not run on Nexus Player";
Jeremy Gebben170781d2020-11-19 16:21:21 -07002545 }
2546
John Zulauf01a49ee2022-07-13 11:37:08 -06002547 CreateRenderPassHelper rp_helper(m_device);
2548 rp_helper.Init();
2549 const VkImage image_input_handle = rp_helper.image_input->handle();
John Zulauf2f5947d2022-07-27 15:36:31 -06002550 const VkRenderPass rp = rp_helper.render_pass->handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002551
John Zulauf01a49ee2022-07-13 11:37:08 -06002552 SyncTestPipeline st_pipe(*this, rp);
2553 st_pipe.InitState();
2554 st_pipe.view_input = rp_helper.view_input;
2555 st_pipe.Init();
2556 const auto& g_pipe = st_pipe.g_pipe;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002557
2558 m_commandBuffer->begin();
2559 auto cb = m_commandBuffer->handle();
2560 VkClearColorValue ccv = {};
2561 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2562
2563 const VkImageMemoryBarrier preClearBarrier = {
2564 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0, 0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
John Zulauf01a49ee2022-07-13 11:37:08 -06002565 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, 0, image_input_handle, full_subresource_range,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002566 };
2567 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2568 &preClearBarrier);
2569
John Zulauf01a49ee2022-07-13 11:37:08 -06002570 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002571 &full_subresource_range);
2572
2573 const VkImageMemoryBarrier postClearBarrier = {
2574 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2575 0,
2576 VK_ACCESS_TRANSFER_WRITE_BIT,
John Zulauffa44ab22022-07-14 15:12:28 -06002577 VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002578 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2579 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2580 0,
2581 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002582 image_input_handle,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002583 full_subresource_range,
2584 };
John Zulauffa44ab22022-07-14 15:12:28 -06002585 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT,
2586 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0u, 0u, nullptr,
2587 0u, nullptr, 1u, &postClearBarrier);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002588
John Zulauf01a49ee2022-07-13 11:37:08 -06002589 m_commandBuffer->BeginRenderPass(rp_helper.render_pass_begin);
John Zulauffa44ab22022-07-14 15:12:28 -06002590 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2591 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2592 &g_pipe.descriptor_set_->set_, 0, nullptr);
2593
2594 // Positive test for ordering rules between load and input attachment usage
2595 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2596
2597 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2598 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002599
2600 // Catch a conflict with the input attachment final layout transition
2601 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002602 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002603 &full_subresource_range);
2604 m_errorMonitor->VerifyFound();
John Zulaufe972b752021-05-04 15:47:17 -06002605
2606    // There should be no hazard for an image layout transition (ILT) immediately after another ILT
John Zulaufe972b752021-05-04 15:47:17 -06002607 m_commandBuffer->end();
paul-lunargb01fd292022-08-24 16:59:08 +02002608 vk::ResetCommandPool(device(), m_commandPool->handle(), 0);
John Zulaufe972b752021-05-04 15:47:17 -06002609 m_commandBuffer->begin();
2610 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2611 &preClearBarrier);
2612 const VkImageMemoryBarrier wawBarrier = {
2613 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2614 0,
2615 VK_ACCESS_SHADER_READ_BIT,
2616 VK_ACCESS_SHADER_READ_BIT,
2617 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2618 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2619 0,
2620 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002621 image_input_handle,
John Zulaufe972b752021-05-04 15:47:17 -06002622 full_subresource_range,
2623 };
John Zulaufe972b752021-05-04 15:47:17 -06002624 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0u, 0u, nullptr, 0u,
2625 nullptr, 1u, &wawBarrier);
John Zulaufe972b752021-05-04 15:47:17 -06002626 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002627}
2628
2629TEST_F(VkSyncValTest, SyncSubpassMultiDep) {
2630 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2631 ASSERT_NO_FATAL_FAILURE(InitState());
2632 if (IsPlatform(kNexusPlayer)) {
2633 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2634 return;
2635 }
2636
John Zulauf01a49ee2022-07-13 11:37:08 -06002637 CreateRenderPassHelper rp_helper_positive(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002638
Jeremy Gebben170781d2020-11-19 16:21:21 -07002639 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2640 VkImageSubresourceLayers mip_0_layer_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
2641 VkOffset3D image_zero{0, 0, 0};
John Zulauf01a49ee2022-07-13 11:37:08 -06002642 VkExtent3D image_size{rp_helper_positive.width, rp_helper_positive.height, 1};
2643
Jeremy Gebben170781d2020-11-19 16:21:21 -07002644 VkImageCopy full_region{mip_0_layer_0, image_zero, mip_0_layer_0, image_zero, image_size};
2645
John Zulauf01a49ee2022-07-13 11:37:08 -06002646 rp_helper_positive.InitState();
John Zulauf2f5947d2022-07-27 15:36:31 -06002647 rp_helper_positive.InitAllAttachmentsToLayoutGeneral();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002648
John Zulauf01a49ee2022-07-13 11:37:08 -06002649    // Create a second renderpass helper; it reuses the positive helper's common state (attachments copied below)
2650 CreateRenderPassHelper rp_helper_negative(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002651
John Zulauf01a49ee2022-07-13 11:37:08 -06002652 auto& subpass_dep_positive = rp_helper_positive.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002653
John Zulauf01a49ee2022-07-13 11:37:08 -06002654 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2655 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002656 VK_DEPENDENCY_BY_REGION_BIT});
John Zulauf01a49ee2022-07-13 11:37:08 -06002657 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2658 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002659 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, VK_DEPENDENCY_BY_REGION_BIT});
John Zulauf01a49ee2022-07-13 11:37:08 -06002660 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2661 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002662 VK_ACCESS_TRANSFER_READ_BIT, VK_DEPENDENCY_BY_REGION_BIT});
John Zulauf01a49ee2022-07-13 11:37:08 -06002663 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2664 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002665 VK_ACCESS_TRANSFER_WRITE_BIT, VK_DEPENDENCY_BY_REGION_BIT});
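    // Taken together, the two EXTERNAL->0 dependencies guard the pre-renderpass transfer writes against subpass 0's
    // attachment accesses, and the two 0->EXTERNAL dependencies guard subpass 0's attachment writes against the
    // post-renderpass transfer read and write.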
Jeremy Gebben170781d2020-11-19 16:21:21 -07002666
John Zulauf01a49ee2022-07-13 11:37:08 -06002667 rp_helper_positive.InitRenderPass();
2668 rp_helper_positive.InitFramebuffer();
2669 rp_helper_positive.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002670
John Zulauf01a49ee2022-07-13 11:37:08 -06002671 auto& subpass_dep_negative = rp_helper_negative.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002672 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2673 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002674 VK_DEPENDENCY_BY_REGION_BIT});
Jeremy Gebben170781d2020-11-19 16:21:21 -07002675 // Show that the two barriers do *not* chain by breaking the positive barrier into two bits.
2676 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2677 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, 0,
paul-lunargb01fd292022-08-24 16:59:08 +02002678 VK_DEPENDENCY_BY_REGION_BIT});
Jeremy Gebben170781d2020-11-19 16:21:21 -07002679 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2680 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
paul-lunargb01fd292022-08-24 16:59:08 +02002681 VK_DEPENDENCY_BY_REGION_BIT});
2682
2683 rp_helper_negative.InitAllAttachmentsToLayoutGeneral();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002684
John Zulauf01a49ee2022-07-13 11:37:08 -06002685    // Negative and positive RPs are compatible.
paul-lunargb01fd292022-08-24 16:59:08 +02002686 rp_helper_negative.attachments = rp_helper_positive.attachments;
2687 rp_helper_negative.InitRenderPass();
2688 rp_helper_negative.InitFramebuffer();
John Zulauf01a49ee2022-07-13 11:37:08 -06002689 rp_helper_negative.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002690
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002691 vk_testing::Sampler sampler;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002692 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002693 sampler.init(*m_device, sampler_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002694
Jeremy Gebben170781d2020-11-19 16:21:21 -07002695
2696 CreatePipelineHelper g_pipe(*this);
John Zulauf2f5947d2022-07-27 15:36:31 -06002697 rp_helper_positive.InitPipelineHelper(g_pipe);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002698
John Zulauf2f5947d2022-07-27 15:36:31 -06002699 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, rp_helper_positive.view_input, VK_NULL_HANDLE,
paul-lunargb01fd292022-08-24 16:59:08 +02002700 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_IMAGE_LAYOUT_GENERAL);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002701 g_pipe.descriptor_set_->UpdateDescriptorSets();
2702
2703 m_commandBuffer->begin();
2704 auto cb = m_commandBuffer->handle();
2705 VkClearColorValue ccv = {};
2706
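    // Barrier templates: image is left VK_NULL_HANDLE here and filled in per image (preClearBarrier / preCopyBarriers) below.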
2707 const VkImageMemoryBarrier xferDestBarrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2708 nullptr,
2709 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
2710 VK_ACCESS_TRANSFER_WRITE_BIT,
2711 VK_IMAGE_LAYOUT_GENERAL,
2712 VK_IMAGE_LAYOUT_GENERAL,
2713 VK_QUEUE_FAMILY_IGNORED,
2714 VK_QUEUE_FAMILY_IGNORED,
2715 VK_NULL_HANDLE,
2716 full_subresource_range};
2717 const VkImageMemoryBarrier xferDestToSrcBarrier = {
2718 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2719 nullptr,
2720 VK_ACCESS_TRANSFER_WRITE_BIT,
2721 VK_ACCESS_TRANSFER_READ_BIT,
2722 VK_IMAGE_LAYOUT_GENERAL,
2723 VK_IMAGE_LAYOUT_GENERAL,
2724 VK_QUEUE_FAMILY_IGNORED,
2725 VK_QUEUE_FAMILY_IGNORED,
2726 VK_NULL_HANDLE,
2727 full_subresource_range,
2728 };
2729
John Zulauf01a49ee2022-07-13 11:37:08 -06002730 const VkImage image_color = rp_helper_positive.image_color->handle();
2731 const VkImage image_input = rp_helper_positive.image_input->handle();
2732
Jeremy Gebben170781d2020-11-19 16:21:21 -07002733 VkImageMemoryBarrier preClearBarrier = xferDestBarrier;
John Zulauf01a49ee2022-07-13 11:37:08 -06002734 preClearBarrier.image = image_color;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002735
2736 VkImageMemoryBarrier preCopyBarriers[2] = {xferDestToSrcBarrier, xferDestBarrier};
John Zulauf01a49ee2022-07-13 11:37:08 -06002737 preCopyBarriers[0].image = image_color;
2738 preCopyBarriers[1].image = image_input;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002739 // Positive test for ordering rules between load and input attachment usage
Jeremy Gebben170781d2020-11-19 16:21:21 -07002740
2741 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2742 &preClearBarrier);
2743
John Zulauf01a49ee2022-07-13 11:37:08 -06002744 vk::CmdClearColorImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002745
2746 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 2u,
2747 preCopyBarriers);
2748
John Zulauf01a49ee2022-07-13 11:37:08 -06002749 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input, VK_IMAGE_LAYOUT_GENERAL, 1u,
2750 &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002751
2752    // No post-copy image barrier; we are testing the subpass dependencies
2753
Jeremy Gebben170781d2020-11-19 16:21:21 -07002754    // Positive renderpass multidependency test
John Zulauf01a49ee2022-07-13 11:37:08 -06002755 m_commandBuffer->BeginRenderPass(rp_helper_positive.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002756 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2757 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2758 &g_pipe.descriptor_set_->set_, 0, nullptr);
2759
2760 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2761
2762 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2763 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002764
paul-lunargb01fd292022-08-24 16:59:08 +02002765 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input, VK_IMAGE_LAYOUT_GENERAL, 1u,
2766 &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002767
Jeremy Gebben170781d2020-11-19 16:21:21 -07002768    // Negative renderpass multidependency test; will fail IFF the dependencies are acting independently.
2769 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-READ_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002770 m_commandBuffer->BeginRenderPass(rp_helper_negative.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002771 m_errorMonitor->VerifyFound();
2772}
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002773
2774TEST_F(VkSyncValTest, RenderPassAsyncHazard) {
2775 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2776 ASSERT_NO_FATAL_FAILURE(InitState());
2777
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002778 if (IsPlatform(kPixel3) || IsPlatform(kPixel3aXL)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002779 GTEST_SKIP() << "Temporarily disabling on Pixel 3 and Pixel 3a XL due to driver crash";
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002780 }
2781
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002782 // overall set up:
2783 // subpass 0:
2784 // write image 0
2785 // subpass 1:
2786 // read image 0
2787 // write image 1
2788 // subpass 2:
2789 // read image 0
2790 // write image 2
2791 // subpass 3:
2792 // read image 0
2793 // write image 3
2794 //
2795 // subpasses 1 & 2 can run in parallel but both should depend on 0
2796 // subpass 3 must run after 1 & 2 because otherwise the store operation will
2797 // race with the reads in the other subpasses.
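    // No VkSubpassDependency is supplied for the first run (dependencyCount == 0), which is what the
    // SYNC-HAZARD-READ-RACING-WRITE / SYNC-HAZARD-WRITE-RACING-WRITE expectations below exercise.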
2798
2799 constexpr VkFormat kFormat = VK_FORMAT_R8G8B8A8_UNORM;
2800 constexpr uint32_t kWidth = 32, kHeight = 32;
2801 constexpr uint32_t kNumImages = 4;
2802
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002803 VkImageCreateInfo src_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002804 src_img_info.flags = 0;
2805 src_img_info.imageType = VK_IMAGE_TYPE_2D;
2806 src_img_info.format = kFormat;
2807 src_img_info.extent = {kWidth, kHeight, 1};
2808 src_img_info.mipLevels = 1;
2809 src_img_info.arrayLayers = 1;
paul-lunargb01fd292022-08-24 16:59:08 +02002810 src_img_info.samples = VK_SAMPLE_COUNT_1_BIT;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002811 src_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2812 src_img_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2813 src_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2814 src_img_info.queueFamilyIndexCount = 0;
2815 src_img_info.pQueueFamilyIndices = nullptr;
2816 src_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2817
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002818 VkImageCreateInfo dst_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002819 dst_img_info.flags = 0;
2820 dst_img_info.imageType = VK_IMAGE_TYPE_2D;
2821 dst_img_info.format = kFormat;
2822 dst_img_info.extent = {kWidth, kHeight, 1};
2823 dst_img_info.mipLevels = 1;
2824 dst_img_info.arrayLayers = 1;
2825 dst_img_info.samples = VK_SAMPLE_COUNT_1_BIT;
2826 dst_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2827 dst_img_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2828 dst_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2829 dst_img_info.queueFamilyIndexCount = 0;
2830 dst_img_info.pQueueFamilyIndices = nullptr;
2831 dst_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2832
2833 std::vector<std::unique_ptr<VkImageObj>> images;
2834 for (uint32_t i = 0; i < kNumImages; i++) {
2835 images.emplace_back(new VkImageObj(m_device));
2836 }
2837 images[0]->Init(src_img_info);
2838 for (uint32_t i = 1; i < images.size(); i++) {
2839 images[i]->Init(dst_img_info);
2840 }
2841
2842 std::array<VkImageView, kNumImages> attachments{};
2843 std::array<VkAttachmentDescription, kNumImages> attachment_descriptions{};
2844 std::array<VkAttachmentReference, kNumImages> color_refs{};
2845 std::array<VkImageMemoryBarrier, kNumImages> img_barriers{};
2846
2847 for (uint32_t i = 0; i < attachments.size(); i++) {
2848 attachments[i] = images[i]->targetView(kFormat);
2849 attachment_descriptions[i] = {};
2850 attachment_descriptions[i].flags = 0;
2851 attachment_descriptions[i].format = kFormat;
2852 attachment_descriptions[i].samples = VK_SAMPLE_COUNT_1_BIT;
2853 attachment_descriptions[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2854 attachment_descriptions[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
2855 attachment_descriptions[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2856 attachment_descriptions[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
2857 attachment_descriptions[i].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2858 attachment_descriptions[i].finalLayout =
2859 (i == 0) ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2860
2861 color_refs[i] = {i, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2862
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002863 img_barriers[i] = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002864 img_barriers[i].srcAccessMask = 0;
2865 img_barriers[i].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2866 img_barriers[i].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2867 img_barriers[i].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2868 img_barriers[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2869 img_barriers[i].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2870 img_barriers[i].image = images[i]->handle();
2871 img_barriers[i].subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
2872 }
2873
2874 const VkAttachmentReference input_ref{0u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2875
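    // Each of subpasses 1..3 preserves the two color attachments it does not itself reference.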
2876 std::array<std::array<uint32_t, 2>, kNumImages - 1> preserve_subpass{{{2, 3}, {1, 3}, {1, 2}}};
2877
2878 std::array<VkSubpassDescription, kNumImages> subpasses{};
2879
2880 subpasses[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2881 subpasses[0].inputAttachmentCount = 0;
2882 subpasses[0].pInputAttachments = nullptr;
2883 subpasses[0].colorAttachmentCount = 1;
2884 subpasses[0].pColorAttachments = &color_refs[0];
2885
2886 for (uint32_t i = 1; i < subpasses.size(); i++) {
2887 subpasses[i].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2888 subpasses[i].inputAttachmentCount = 1;
2889 subpasses[i].pInputAttachments = &input_ref;
2890 subpasses[i].colorAttachmentCount = 1;
paul-lunargb01fd292022-08-24 16:59:08 +02002891 subpasses[i].pColorAttachments = &color_refs[i];
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002892 subpasses[i].preserveAttachmentCount = preserve_subpass[i - 1].size();
2893 subpasses[i].pPreserveAttachments = preserve_subpass[i - 1].data();
2894 }
2895
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002896 VkRenderPassCreateInfo renderpass_info = LvlInitStruct<VkRenderPassCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002897 renderpass_info.flags = 0;
2898 renderpass_info.attachmentCount = attachment_descriptions.size();
2899 renderpass_info.pAttachments = attachment_descriptions.data();
2900 renderpass_info.subpassCount = subpasses.size();
2901 renderpass_info.pSubpasses = subpasses.data();
2902 renderpass_info.dependencyCount = 0;
2903 renderpass_info.pDependencies = nullptr;
2904
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002905 VkFramebufferCreateInfo fbci = LvlInitStruct<VkFramebufferCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002906 fbci.flags = 0;
2907 fbci.attachmentCount = attachments.size();
2908 fbci.pAttachments = attachments.data();
2909 fbci.width = kWidth;
2910 fbci.height = kHeight;
2911 fbci.layers = 1;
2912
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002913 vk_testing::Sampler sampler;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002914 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002915 sampler.init(*m_device, sampler_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002916
sfricke-samsungae54c1e2022-01-21 05:35:21 -08002917 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
John Zulauf01a49ee2022-07-13 11:37:08 -06002918 VkShaderObj fs(this, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002919
2920 VkClearValue clear = {};
2921 clear.color = m_clear_color;
Tony-LunarG73f37032021-06-07 11:47:03 -06002922 std::array<VkClearValue, 4> clear_values = {{clear, clear, clear, clear}};
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002923
2924 // run the renderpass with no dependencies
2925 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002926 vk_testing::RenderPass rp;
2927 vk_testing::Framebuffer fb;
2928 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002929
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002930 fbci.renderPass = rp.handle();
2931 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002932
2933 CreatePipelineHelper g_pipe_0(*this);
2934 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002935 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002936 g_pipe_0.InitState();
2937 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
2938
2939 CreatePipelineHelper g_pipe_12(*this);
2940 g_pipe_12.InitInfo();
2941 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2942 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002943 g_pipe_12.gp_ci_.renderPass = rp.handle();
paul-lunargb01fd292022-08-24 16:59:08 +02002944 g_pipe_12.gp_ci_.subpass = 1;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002945 g_pipe_12.InitState();
2946 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
2947
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002948 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002949 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
2950
2951 m_commandBuffer->begin();
2952
2953 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2954 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
2955 img_barriers.data());
2956
2957 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
2958 m_renderPassBeginInfo.pClearValues = clear_values.data();
2959 m_renderPassBeginInfo.clearValueCount = clear_values.size();
2960
2961 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002962 m_renderPassBeginInfo.renderPass = rp.handle();
2963 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002964
paul-lunargb01fd292022-08-24 16:59:08 +02002965 m_errorMonitor->SetUnexpectedError("UNASSIGNED-CoreValidation-DrawState-InvalidRenderpass");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002966 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2967 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
2968 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
2969 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
2970
2971 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2972
2973 for (uint32_t i = 1; i < subpasses.size(); i++) {
2974 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
2975 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
2976 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
2977 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
2978
2979 // we're racing the writes from subpass 0 with our shader reads
2980 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ-RACING-WRITE");
paul-lunargb01fd292022-08-24 16:59:08 +02002981 m_errorMonitor->SetUnexpectedError("VUID-vkCmdDraw-subpass-02685");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002982 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2983 m_errorMonitor->VerifyFound();
2984 }
2985
2986 // we should get an error from async checking in both subpasses 2 & 3
2987 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002988 vk::CmdEndRenderPass(m_commandBuffer->handle());
2989 m_errorMonitor->VerifyFound();
2990
paul-lunargb01fd292022-08-24 16:59:08 +02002991 vk::ResetCommandPool(device(), m_commandPool->handle(), 0);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002992 }
2993
2994 // add dependencies from subpass 0 to the others, which are necessary but not sufficient
2995 std::vector<VkSubpassDependency> subpass_dependencies;
2996 for (uint32_t i = 1; i < subpasses.size(); i++) {
2997 VkSubpassDependency dep{0,
2998 i,
2999 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
3000 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
3001 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
3002 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
3003 0};
3004 subpass_dependencies.push_back(dep);
3005 }
3006 renderpass_info.dependencyCount = subpass_dependencies.size();
3007 renderpass_info.pDependencies = subpass_dependencies.data();
3008
3009 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003010 vk_testing::RenderPass rp;
3011 vk_testing::Framebuffer fb;
3012 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003013
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003014 fbci.renderPass = rp.handle();
3015 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003016
3017 CreatePipelineHelper g_pipe_0(*this);
3018 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003019 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003020 g_pipe_0.InitState();
3021 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3022
3023 CreatePipelineHelper g_pipe_12(*this);
3024 g_pipe_12.InitInfo();
3025 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3026 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003027 g_pipe_12.gp_ci_.renderPass = rp.handle();
paul-lunargb01fd292022-08-24 16:59:08 +02003028 g_pipe_12.gp_ci_.subpass = 1;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003029 g_pipe_12.InitState();
3030 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3031
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003032 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003033 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3034
3035 m_commandBuffer->begin();
3036
3037 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3038 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3039 img_barriers.data());
3040
3041 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3042 m_renderPassBeginInfo.pClearValues = clear_values.data();
3043 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3044
3045 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003046 m_renderPassBeginInfo.renderPass = rp.handle();
3047 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003048
3049 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3050 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3051 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3052 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3053
3054 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3055
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003056 for (uint32_t i = 1; i < subpasses.size(); i++) {
3057 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3058 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3059 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3060 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
paul-lunargb01fd292022-08-24 16:59:08 +02003061
3062 m_errorMonitor->SetUnexpectedError("VUID-vkCmdDraw-subpass-02685");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003063 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3064 }
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003065 // a WRITE-RACING-WRITE could also be expected here (2 subpasses could try to do the store operation), but that check is currently disabled:
paul-lunargb01fd292022-08-24 16:59:08 +02003066 // m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003067 // ... but we do expect this one because the store could happen during a shader read from another subpass
3068 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
3069 vk::CmdEndRenderPass(m_commandBuffer->handle());
3070 m_errorMonitor->VerifyFound();
3071
paul-lunargb01fd292022-08-24 16:59:08 +02003072 vk::ResetCommandPool(device(), m_commandPool->handle(), 0);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003073 }
3074
3075 // try again with correct dependencies to make subpass 3 depend on 1 & 2
3076 for (uint32_t i = 1; i < (subpasses.size() - 1); i++) {
3077 VkSubpassDependency dep{i,
3078 static_cast<uint32_t>(subpasses.size() - 1),
3079 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
3080 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
3081 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
3082 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
3083 0};
3084 subpass_dependencies.push_back(dep);
3085 }
3086 renderpass_info.dependencyCount = subpass_dependencies.size();
3087 renderpass_info.pDependencies = subpass_dependencies.data();
3088 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003089 vk_testing::RenderPass rp;
3090 vk_testing::Framebuffer fb;
3091 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003092
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003093 fbci.renderPass = rp.handle();
3094 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003095
3096 CreatePipelineHelper g_pipe_0(*this);
3097 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003098 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003099 g_pipe_0.InitState();
3100 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3101
3102 CreatePipelineHelper g_pipe_12(*this);
3103 g_pipe_12.InitInfo();
3104 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3105 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003106 g_pipe_12.gp_ci_.renderPass = rp.handle();
paul-lunargb01fd292022-08-24 16:59:08 +02003107 g_pipe_12.gp_ci_.subpass = 1;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003108 g_pipe_12.InitState();
3109 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3110
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003111 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003112 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3113
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003114 m_commandBuffer->begin();
3115 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3116 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3117 img_barriers.data());
3118
3119 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3120 m_renderPassBeginInfo.pClearValues = clear_values.data();
3121 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3122
3123 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003124 m_renderPassBeginInfo.renderPass = rp.handle();
3125 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003126
3127 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3128 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3129 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3130 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3131
3132 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3133
3134 for (uint32_t i = 1; i < subpasses.size(); i++) {
3135 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3136 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3137 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3138 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
paul-lunargb01fd292022-08-24 16:59:08 +02003139
3140 m_errorMonitor->SetUnexpectedError("VUID-vkCmdDraw-subpass-02685");
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003141 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3142 }
3143
3144 vk::CmdEndRenderPass(m_commandBuffer->handle());
3145
3146 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003147 }
3148}
John Zulauf025ee442020-12-15 11:44:19 -07003149
3150TEST_F(VkSyncValTest, SyncEventsBufferCopy) {
3151 TEST_DESCRIPTION("Check Set/Wait protection for a variety of use cases using buffer copies");
3152 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3153 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3154
3155 VkBufferObj buffer_a;
3156 VkBufferObj buffer_b;
3157 VkBufferObj buffer_c;
3158 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3159 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3160 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3161 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
3162
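             // VkBufferCopy is {srcOffset, dstOffset, size}: "front" covers bytes [0, 128), "back" covers bytes [128, 256)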
3163 VkBufferCopy region = {0, 0, 256};
3164 VkBufferCopy front2front = {0, 0, 128};
3165 VkBufferCopy front2back = {0, 128, 128};
3166 VkBufferCopy back2back = {128, 128, 128};
3167
3168 VkEventObj event;
3169 event.init(*m_device, VkEventObj::create_info(0));
3170 VkEvent event_handle = event.handle();
3171
3172 auto cb = m_commandBuffer->handle();
3173 m_commandBuffer->begin();
3174
        3175 // Copy after set for WAR (note we are writing to the back half of c but only reading from the front)
John Zulauf025ee442020-12-15 11:44:19 -07003176 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3177 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3178 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_c.handle(), 1, &back2back);
3179 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3180 nullptr, 0, nullptr);
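             // The front half of buffer_a was last read before the SetEvent, so the wait's first scope protects this write...
             // ...but the back half was read again after the SetEvent, so the next write to it is an unprotected WAR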
3181 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003182 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3183 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
3184 m_errorMonitor->VerifyFound();
3185 m_commandBuffer->end();
3186
3187 // WAR prevented
3188 m_commandBuffer->reset();
3189 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003190 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3191 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3192 // Just protect against WAR, only need a sync barrier.
3193 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3194 nullptr, 0, nullptr);
3195 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003196
3197 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
3198 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3199 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3200 m_errorMonitor->VerifyFound();
3201 m_commandBuffer->end();
3202
3203 // Prevent WAR and WAW
3204 m_commandBuffer->reset();
3205 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003206 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3207 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003208 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003209 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3210 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3211 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3212 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3213 // The WAW should be safe (on a memory barrier)
3214 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3215 // The WAR should also be safe (on a sync barrier)
3216 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003217 m_commandBuffer->end();
3218
3219 // Barrier range check for WAW
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003220 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003221 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3222 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3223 buffer_barrier_front_waw.buffer = buffer_b.handle();
3224 buffer_barrier_front_waw.offset = front2front.dstOffset;
3225 buffer_barrier_front_waw.size = front2front.size;
3226
3227 // Front safe, back WAW
3228 m_commandBuffer->reset();
3229 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003230 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3231 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3232 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3233 &buffer_barrier_front_waw, 0, nullptr);
3234 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003235 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3236 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &back2back);
3237 m_errorMonitor->VerifyFound();
3238 m_commandBuffer->end();
3239}
3240
3241TEST_F(VkSyncValTest, SyncEventsCopyImageHazards) {
3242 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3243 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3244
3245 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3246 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3247 VkImageObj image_a(m_device);
3248 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
3249 image_a.Init(image_ci);
3250 ASSERT_TRUE(image_a.initialized());
3251
3252 VkImageObj image_b(m_device);
3253 image_b.Init(image_ci);
3254 ASSERT_TRUE(image_b.initialized());
3255
3256 VkImageObj image_c(m_device);
3257 image_c.Init(image_ci);
3258 ASSERT_TRUE(image_c.initialized());
3259
3260 VkEventObj event;
3261 event.init(*m_device, VkEventObj::create_info(0));
3262 VkEvent event_handle = event.handle();
3263
3264 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
3265 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
3266 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
3267 VkImageSubresourceRange layers_0_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
3268 VkOffset3D zero_offset{0, 0, 0};
3269 VkOffset3D half_offset{64, 64, 0};
3270 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
3271 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
3272
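             // "q0" = the top-left 64x64 quadrant (zero_offset), "q3" = the bottom-right 64x64 quadrant (half_offset) of layer 0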
3273 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
3274 VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
3275 VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
3276 VkImageCopy region_0_q0toq0 = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
3277 VkImageCopy region_0_q0toq3 = {layers_0, zero_offset, layers_0, half_offset, half_extent};
3278 VkImageCopy region_0_q3toq3 = {layers_0, half_offset, layers_0, half_offset, half_extent};
3279
3280 auto cb = m_commandBuffer->handle();
3281 auto copy_general = [cb](const VkImageObj &from, const VkImageObj &to, const VkImageCopy &region) {
3282 vk::CmdCopyImage(cb, from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
3283 };
3284
3285 auto set_layouts = [this, &image_a, &image_b, &image_c]() {
3286 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3287 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3288 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3289 };
3290
John Zulaufdd462092020-12-18 12:00:35 -07003291 // Scope check: one prior access is inside the event's first scope, one is not
John Zulauf025ee442020-12-15 11:44:19 -07003292 m_commandBuffer->begin();
3293 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003294 copy_general(image_a, image_b, full_region);
3295 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3296 copy_general(image_a, image_c, region_0_q3toq3);
3297 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3298 nullptr, 0, nullptr);
3299 copy_general(image_c, image_a, region_0_q0toq0);
John Zulauf025ee442020-12-15 11:44:19 -07003300 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3301 copy_general(image_c, image_a, region_0_q0toq3);
3302 m_errorMonitor->VerifyFound();
3303 m_commandBuffer->end();
3304
3305 // WAR prevented
3306 m_commandBuffer->reset();
3307 m_commandBuffer->begin();
3308 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003309 copy_general(image_a, image_b, full_region);
3310 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3311 // Just protect against WAR, only need a sync barrier.
3312 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3313 nullptr, 0, nullptr);
3314 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003315
3316 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
3317 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3318 copy_general(image_c, image_b, full_region);
3319 m_errorMonitor->VerifyFound();
3320 m_commandBuffer->end();
3321
3322 // Prevent WAR and WAW
3323 m_commandBuffer->reset();
3324 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003325 set_layouts();
3326 copy_general(image_a, image_b, full_region);
3327 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003328 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003329 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3330 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3331 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3332 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3333 // The WAW should be safe (on a memory barrier)
3334 copy_general(image_c, image_b, full_region);
3335 // The WAR should also be safe (on a sync barrier)
3336 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003337 m_commandBuffer->end();
3338
3339 // Barrier range check for WAW
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003340 auto image_barrier_region0_waw = LvlInitStruct<VkImageMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003341 image_barrier_region0_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3342 image_barrier_region0_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3343 image_barrier_region0_waw.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
3344 image_barrier_region0_waw.newLayout = VK_IMAGE_LAYOUT_GENERAL;
3345 image_barrier_region0_waw.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3346 image_barrier_region0_waw.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3347 image_barrier_region0_waw.image = image_b.handle();
3348 image_barrier_region0_waw.subresourceRange = layers_0_subresource_range;
3349
        3350 // Layer 0 copy is protected by the barrier, layer 1 copy is a WAW
3351 m_commandBuffer->reset();
3352 m_commandBuffer->begin();
3353 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003354 copy_general(image_a, image_b, full_region);
3355 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3356 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3357 nullptr, 1, &image_barrier_region0_waw);
3358 copy_general(image_a, image_b, region_0_to_0);
John Zulauf025ee442020-12-15 11:44:19 -07003359 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3360 copy_general(image_a, image_b, region_1_to_1);
3361 m_errorMonitor->VerifyFound();
3362 m_commandBuffer->end();
3363}
John Zulauf4b5e4632020-12-15 11:48:59 -07003364
3365TEST_F(VkSyncValTest, SyncEventsCommandHazards) {
3366 TEST_DESCRIPTION("Check Set/Reset/Wait command hazard checking");
3367 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3368 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3369
3370 VkEventObj event;
3371 event.init(*m_device, VkEventObj::create_info(0));
3372
3373 const VkEvent event_handle = event.handle();
3374
3375 m_commandBuffer->begin();
John Zulauf4b5e4632020-12-15 11:48:59 -07003376 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003377
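             // Waiting on an event that was reset in this command buffer without an intervening execution dependency is invalid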
John Zulauf4edde622021-02-15 08:54:50 -07003378 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-event-03834");
John Zulauf4b5e4632020-12-15 11:48:59 -07003379 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3380 nullptr, 0, nullptr);
3381 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003382 m_commandBuffer->end();
3383
3384 m_commandBuffer->begin();
3385 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3386 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, nullptr,
3387 0, nullptr, 0, nullptr);
John Zulauf4b5e4632020-12-15 11:48:59 -07003388 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-wait");
3389 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3390 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003391 m_commandBuffer->end();
3392
3393 m_commandBuffer->begin();
3394 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003395 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-reset");
3396 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3397 m_errorMonitor->VerifyFound();
3398
John Zulauf4b5e4632020-12-15 11:48:59 -07003399 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3400 nullptr, 0, nullptr);
3401 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3402 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3403 nullptr, 0, nullptr);
3404 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3405 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3406 nullptr, 0, nullptr);
3407 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003408
3409 // Need a barrier between set and a reset
3410 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-set");
3411 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3412 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003413 m_commandBuffer->end();
3414
3415 m_commandBuffer->begin();
3416 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003417 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-set");
3418 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3419 m_errorMonitor->VerifyFound();
3420
3421 m_commandBuffer->end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003422
3423 // Secondary command buffer events tests
3424 const auto cb = m_commandBuffer->handle();
3425 VkBufferObj buffer_a;
3426 VkBufferObj buffer_b;
3427 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3428 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3429 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3430
3431 VkBufferCopy front2front = {0, 0, 128};
3432
3433 // Barrier range check for WAW
3434 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
3435 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3436 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3437 buffer_barrier_front_waw.buffer = buffer_b.handle();
3438 buffer_barrier_front_waw.offset = front2front.dstOffset;
3439 buffer_barrier_front_waw.size = front2front.size;
3440
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003441 VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
3442 VkCommandBuffer scb1 = secondary_cb1.handle();
3443 secondary_cb1.begin();
3444 secondary_cb1.WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3445 &buffer_barrier_front_waw, 0, nullptr);
3446 vk::CmdCopyBuffer(scb1, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3447 secondary_cb1.end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003448
3449 // One secondary cb hazarding with primary
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003450 m_commandBuffer->reset();
3451 m_commandBuffer->begin();
3452 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003453 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3454 vk::CmdExecuteCommands(cb, 1, &scb1);
3455 m_errorMonitor->VerifyFound();
3456 m_commandBuffer->end();
3457
3458 // One secondary cb sharing event with primary
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003459 m_commandBuffer->reset();
3460 m_commandBuffer->begin();
3461 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3462 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3463 vk::CmdExecuteCommands(cb, 1, &scb1);
3464 m_commandBuffer->end();
John Zulauf4b5e4632020-12-15 11:48:59 -07003465}
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003466
3467TEST_F(VkLayerTest, CmdWaitEvents2KHRUsedButSynchronizaion2Disabled) {
3468 TEST_DESCRIPTION("Using CmdWaitEvents2KHR when synchronization2 is not enabled");
Tony-LunarG53b72e52021-11-19 10:04:40 -07003469 SetTargetApiVersion(VK_API_VERSION_1_3);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003470
Tony-LunarGdf960d42022-01-27 16:13:34 -07003471 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003472 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003473 if (!AreRequiredExtensionsEnabled()) {
3474 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003475 }
sjfricked8e01c52022-07-06 14:09:04 +09003476 ASSERT_NO_FATAL_FAILURE(InitState());
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003477
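             // InitState() above does not enable the synchronization2 feature, so the *2 wait commands must be rejected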
Tony-LunarG53b72e52021-11-19 10:04:40 -07003478 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003479 auto fpCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWaitEvents2KHR");
3480
3481 VkEventObj event;
3482 event.init(*m_device, VkEventObj::create_info(0));
3483 VkEvent event_handle = event.handle();
3484
3485 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3486
3487 m_commandBuffer->begin();
Tony-LunarG279601c2021-11-16 10:50:51 -07003488 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003489 fpCmdWaitEvents2KHR(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3490 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003491 if (vulkan_13) {
3492 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
3493 vk::CmdWaitEvents2(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3494 m_errorMonitor->VerifyFound();
3495 }
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003496 m_commandBuffer->end();
3497}
ziga-lunarg15f450d2021-08-26 23:10:05 +02003498
3499TEST_F(VkLayerTest, Sync2FeatureDisabled) {
3500 TEST_DESCRIPTION("Call sync2 functions when the feature is disabled");
3501
Tony-LunarG53b72e52021-11-19 10:04:40 -07003502 SetTargetApiVersion(VK_API_VERSION_1_3);
Tony-LunarGdf960d42022-01-27 16:13:34 -07003503 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003504 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003505 if (!AreRequiredExtensionsEnabled()) {
3506 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg15f450d2021-08-26 23:10:05 +02003507 }
3508
3509 ASSERT_NO_FATAL_FAILURE(InitState());
3510
Tony-LunarG53b72e52021-11-19 10:04:40 -07003511 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003512 VkPhysicalDeviceSynchronization2FeaturesKHR synchronization2 = LvlInitStruct<VkPhysicalDeviceSynchronization2FeaturesKHR>();
3513 synchronization2.synchronization2 = VK_FALSE; // Invalid
sjfricke11db0c72022-08-18 13:23:11 +09003514 GetPhysicalDeviceFeatures2(synchronization2);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003515
3516 auto vkCmdPipelineBarrier2KHR =
3517 (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
3518 auto vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdResetEvent2KHR");
3519 auto vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetEvent2KHR");
3520 auto vkCmdWriteTimestamp2KHR =
3521 (PFN_vkCmdWriteTimestamp2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteTimestamp2KHR");
3522
3523 bool timestamp = false;
3524
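             // Only exercise vkCmdWriteTimestamp2* if the graphics queue family reports non-zero timestampValidBits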
3525 uint32_t queue_count;
3526 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
3527 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
3528 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
3529 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits > 0) {
3530 timestamp = true;
3531 }
3532
3533 m_commandBuffer->begin();
3534
3535 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3536
Tony-LunarG279601c2021-11-16 10:50:51 -07003537 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003538 vkCmdPipelineBarrier2KHR(m_commandBuffer->handle(), &dependency_info);
3539 m_errorMonitor->VerifyFound();
3540
3541 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
3542 vk_testing::Event event;
3543 event.init(*m_device, eci);
3544
3545 VkPipelineStageFlagBits2KHR stage = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR;
3546
Tony-LunarG279601c2021-11-16 10:50:51 -07003547 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003548 vkCmdResetEvent2KHR(m_commandBuffer->handle(), event.handle(), stage);
3549 m_errorMonitor->VerifyFound();
3550
Tony-LunarG279601c2021-11-16 10:50:51 -07003551 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003552 vkCmdSetEvent2KHR(m_commandBuffer->handle(), event.handle(), &dependency_info);
3553 m_errorMonitor->VerifyFound();
3554
3555 if (timestamp) {
3556 VkQueryPoolCreateInfo qpci = LvlInitStruct<VkQueryPoolCreateInfo>();
3557 qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
3558 qpci.queryCount = 1;
3559
3560 vk_testing::QueryPool query_pool;
3561 query_pool.init(*m_device, qpci);
3562
Tony-LunarG279601c2021-11-16 10:50:51 -07003563 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003564 vkCmdWriteTimestamp2KHR(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3565 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003566 if (vulkan_13) {
3567 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
3568 vk::CmdWriteTimestamp2(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3569 m_errorMonitor->VerifyFound();
3570 }
3571 }
3572 if (vulkan_13) {
3573 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
3574 vk::CmdPipelineBarrier2(m_commandBuffer->handle(), &dependency_info);
3575 m_errorMonitor->VerifyFound();
3576
3577 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
3578 vk::CmdResetEvent2(m_commandBuffer->handle(), event.handle(), stage);
3579 m_errorMonitor->VerifyFound();
3580
3581 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
3582 vk::CmdSetEvent2(m_commandBuffer->handle(), event.handle(), &dependency_info);
3583 m_errorMonitor->VerifyFound();
ziga-lunarg15f450d2021-08-26 23:10:05 +02003584 }
3585
3586 m_commandBuffer->end();
3587}
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003588
3589TEST_F(VkSyncValTest, DestroyedUnusedDescriptors) {
3590 TEST_DESCRIPTION("Verify unused descriptors are ignored and don't crash syncval if they've been destroyed.");
3591 SetTargetApiVersion(VK_API_VERSION_1_1);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003592 AddRequiredExtensions(VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
3593 AddRequiredExtensions(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
3594
3595 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3596
sjfricked700bc02022-05-30 16:35:06 +09003597 if (!AreRequiredExtensionsEnabled()) {
3598 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003599 }
3600
3601 auto indexing_features = LvlInitStruct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
sjfricke11db0c72022-08-18 13:23:11 +09003602 auto features2 = GetPhysicalDeviceFeatures2(indexing_features);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003603 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
3604 if (!indexing_features.descriptorBindingPartiallyBound) {
3605 printf("%s Partially bound bindings not supported, skipping test\n", kSkipPrefix);
3606 return;
3607 }
3608 if (!indexing_features.descriptorBindingUpdateUnusedWhilePending) {
3609 printf("%s Updating unused while pending is not supported, skipping test\n", kSkipPrefix);
3610 return;
3611 }
3612
3613 ASSERT_NO_FATAL_FAILURE(InitViewport());
3614 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003615
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003616 VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags =
3617 LvlInitStruct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003618 constexpr size_t kNumDescriptors = 6;
3619
3620 std::array<VkDescriptorBindingFlagsEXT, kNumDescriptors> ds_binding_flags;
3621 for (auto &elem : ds_binding_flags) {
3622 elem = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT;
3623 }
3624
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003625 layout_createinfo_binding_flags.bindingCount = ds_binding_flags.size();
3626 layout_createinfo_binding_flags.pBindingFlags = ds_binding_flags.data();
3627
3628 // Prepare descriptors
3629 OneOffDescriptorSet descriptor_set(m_device,
3630 {
3631 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3632 {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3633 {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3634 {3, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3635 {4, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
3636 {5, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3637 },
3638 0, &layout_createinfo_binding_flags, 0);
3639 const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
3640 uint32_t qfi = 0;
3641 auto buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3642 buffer_create_info.size = 32;
3643 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
3644 buffer_create_info.queueFamilyIndexCount = 1;
3645 buffer_create_info.pQueueFamilyIndices = &qfi;
3646
3647 VkBufferObj doit_buffer;
3648 doit_buffer.init(*m_device, buffer_create_info);
3649
3650 auto buffer = layer_data::make_unique<VkBufferObj>();
3651 buffer->init(*m_device, buffer_create_info);
3652
3653 VkDescriptorBufferInfo buffer_info[2] = {};
3654 buffer_info[0].buffer = doit_buffer.handle();
3655 buffer_info[0].offset = 0;
3656 buffer_info[0].range = sizeof(uint32_t);
3657 buffer_info[1].buffer = buffer->handle();
3658 buffer_info[1].offset = 0;
3659 buffer_info[1].range = sizeof(uint32_t);
3660
3661 VkBufferObj texel_buffer;
3662 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
3663 texel_buffer.init(*m_device, buffer_create_info);
3664
3665 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
3666 bvci.buffer = texel_buffer.handle();
3667 bvci.format = VK_FORMAT_R32_SFLOAT;
3668 bvci.offset = 0;
3669 bvci.range = VK_WHOLE_SIZE;
3670
3671 auto texel_bufferview = layer_data::make_unique<vk_testing::BufferView>();
3672 texel_bufferview->init(*m_device, bvci);
3673
3674 auto index_buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3675 index_buffer_create_info.size = sizeof(uint32_t);
3676 index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
3677 VkBufferObj index_buffer;
3678 index_buffer.init(*m_device, index_buffer_create_info);
3679
3680 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3681 VkImageObj sampled_image(m_device);
3682 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3683 sampled_image.Init(image_ci);
3684 auto sampled_view = layer_data::make_unique<vk_testing::ImageView>();
3685 auto imageview_ci = SafeSaneImageViewCreateInfo(sampled_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3686 sampled_view->init(*m_device, imageview_ci);
3687
3688 VkImageObj combined_image(m_device);
3689 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3690 combined_image.Init(image_ci);
3691 imageview_ci = SafeSaneImageViewCreateInfo(combined_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3692 auto combined_view = layer_data::make_unique<vk_testing::ImageView>();
3693 combined_view->init(*m_device, imageview_ci);
3694
3695 vk_testing::Sampler sampler;
3696 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
3697 sampler.init(*m_device, sampler_ci);
3698
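             // image_info[0]: sampler only (binding 3), image_info[1]: sampled image only (binding 4), image_info[2]: combined image sampler (binding 5)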
3699 VkDescriptorImageInfo image_info[3] = {};
3700 image_info[0].sampler = sampler.handle();
3701 image_info[0].imageView = VK_NULL_HANDLE;
3702 image_info[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3703 image_info[1].sampler = VK_NULL_HANDLE;
3704 image_info[1].imageView = sampled_view->handle();
3705 image_info[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3706 image_info[2].sampler = sampler.handle();
3707 image_info[2].imageView = combined_view->handle();
3708 image_info[2].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3709
3710 // Update all descriptors
3711 std::array<VkWriteDescriptorSet, kNumDescriptors> descriptor_writes;
3712 descriptor_writes[0] = LvlInitStruct<VkWriteDescriptorSet>();
3713 descriptor_writes[0].dstSet = descriptor_set.set_;
3714 descriptor_writes[0].dstBinding = 0;
3715 descriptor_writes[0].descriptorCount = 1;
3716 descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3717 descriptor_writes[0].pBufferInfo = &buffer_info[0];
3718
3719 descriptor_writes[1] = LvlInitStruct<VkWriteDescriptorSet>();
3720 descriptor_writes[1].dstSet = descriptor_set.set_;
3721 descriptor_writes[1].dstBinding = 1;
3722 descriptor_writes[1].descriptorCount = 1;
3723 descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3724 descriptor_writes[1].pBufferInfo = &buffer_info[1];
3725
3726 descriptor_writes[2] = LvlInitStruct<VkWriteDescriptorSet>();
3727 descriptor_writes[2].dstSet = descriptor_set.set_;
3728 descriptor_writes[2].dstBinding = 2;
3729 descriptor_writes[2].descriptorCount = 1;
3730 descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
3731 descriptor_writes[2].pTexelBufferView = &texel_bufferview->handle();
3732
3733 descriptor_writes[3] = LvlInitStruct<VkWriteDescriptorSet>();
3734 descriptor_writes[3].dstSet = descriptor_set.set_;
3735 descriptor_writes[3].dstBinding = 3;
3736 descriptor_writes[3].descriptorCount = 1;
3737 descriptor_writes[3].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
3738 descriptor_writes[3].pImageInfo = &image_info[0];
3739
3740 descriptor_writes[4] = LvlInitStruct<VkWriteDescriptorSet>();
3741 descriptor_writes[4].dstSet = descriptor_set.set_;
3742 descriptor_writes[4].dstBinding = 4;
3743 descriptor_writes[4].descriptorCount = 1;
3744 descriptor_writes[4].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
3745 descriptor_writes[4].pImageInfo = &image_info[1];
3746
3747 descriptor_writes[5] = LvlInitStruct<VkWriteDescriptorSet>();
3748 descriptor_writes[5].dstSet = descriptor_set.set_;
3749 descriptor_writes[5].dstBinding = 5;
3750 descriptor_writes[5].descriptorCount = 1;
3751 descriptor_writes[5].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
3752 descriptor_writes[5].pImageInfo = &image_info[2];
3753
3754 vk::UpdateDescriptorSets(m_device->device(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
3755
3756 // only descriptor 0 is used, the rest are going to get destroyed
3757 char const *shader_source = R"glsl(
3758 #version 450
3759 layout(set = 0, binding = 0) uniform foo_0 { int val; } doit;
3760 layout(set = 0, binding = 1) uniform foo_1 { int val; } readit;
3761 layout(set = 0, binding = 2) uniform samplerBuffer texels;
3762 layout(set = 0, binding = 3) uniform sampler samp;
3763 layout(set = 0, binding = 4) uniform texture2D img;
3764 layout(set = 0, binding = 5) uniform sampler2D sampled_image;
3765
3766 void main() {
3767 vec4 x;
3768 vec4 y;
3769 vec4 z;
3770 if (doit.val == 0) {
3771 gl_Position = vec4(0.0);
3772 x = vec4(0.0);
3773 y = vec4(0.0);
3774 z = vec4(0.0);
3775 } else {
3776 gl_Position = vec4(readit.val);
3777 x = texelFetch(texels, 5);
3778 y = texture(sampler2D(img, samp), vec2(0));
3779 z = texture(sampled_image, vec2(0));
3780 }
3781 }
3782 )glsl";
3783
3784 VkShaderObj vs(this, shader_source, VK_SHADER_STAGE_VERTEX_BIT);
3785 VkPipelineObj pipe(m_device);
3786 pipe.AddShader(&vs);
3787 pipe.AddDefaultColorAttachment();
3788 pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003789 VkCommandBufferBeginInfo begin_info = LvlInitStruct<VkCommandBufferBeginInfo>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003790 m_commandBuffer->begin(&begin_info);
3791 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
3792 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
3793 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
3794 &descriptor_set.set_, 0, nullptr);
3795
3796 // destroy resources for the unused descriptors
3797 buffer.reset();
3798 texel_bufferview.reset();
3799 sampled_view.reset();
3800 combined_view.reset();
3801
3802 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
3803 VkViewport viewport = {0, 0, 16, 16, 0, 1};
3804 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
3805 VkRect2D scissor = {{0, 0}, {16, 16}};
3806 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
3807 vk::CmdDrawIndexed(m_commandBuffer->handle(), 1, 1, 0, 0, 0);
3808 vk::CmdEndRenderPass(m_commandBuffer->handle());
3809 m_commandBuffer->end();
3810 m_commandBuffer->QueueCommandBuffer();
3811 vk::QueueWaitIdle(m_device->m_queue);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003812}
ziga-lunargc71f1a92022-03-23 23:08:35 +01003813
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003814TEST_F(VkSyncValTest, TestInvalidExternalSubpassDependency) {
3815 TEST_DESCRIPTION("Test write after write hazard with invalid external subpass dependency");
3816
3817 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3818 ASSERT_NO_FATAL_FAILURE(InitState());
3819
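             // A subpass 0 -> EXTERNAL dependency with all-zero stage and access masks provides no ordering for the end-of-render-pass operations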
3820 VkSubpassDependency subpass_dependency = {};
3821 subpass_dependency.srcSubpass = 0;
3822 subpass_dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
3823 subpass_dependency.srcStageMask = 0;
3824 subpass_dependency.dstStageMask = 0;
3825 subpass_dependency.srcAccessMask = 0;
3826 subpass_dependency.dstAccessMask = 0;
3827 subpass_dependency.dependencyFlags = 0;
3828
3829 VkAttachmentReference attach_ref1 = {};
3830 attach_ref1.attachment = 0;
paul-lunargb01fd292022-08-24 16:59:08 +02003831 attach_ref1.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003832 VkAttachmentReference attach_ref2 = {};
3833 attach_ref2.attachment = 0;
3834 attach_ref2.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3835
3836 VkSubpassDescription subpass_descriptions[2] = {};
3837 subpass_descriptions[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3838 subpass_descriptions[0].pDepthStencilAttachment = &attach_ref1;
3839 subpass_descriptions[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3840 subpass_descriptions[1].pDepthStencilAttachment = &attach_ref2;
3841
3842 VkAttachmentDescription attachment_description = {};
3843 attachment_description.format = VK_FORMAT_D32_SFLOAT;
3844 attachment_description.samples = VK_SAMPLE_COUNT_1_BIT;
3845 attachment_description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
3846 attachment_description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
3847 attachment_description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
3848 attachment_description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
paul-lunargb01fd292022-08-24 16:59:08 +02003849 attachment_description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003850 attachment_description.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
3851
3852 auto rp_ci = LvlInitStruct<VkRenderPassCreateInfo>();
3853 rp_ci.subpassCount = 1;
3854 rp_ci.pSubpasses = subpass_descriptions;
3855 rp_ci.attachmentCount = 1;
3856 rp_ci.pAttachments = &attachment_description;
3857 rp_ci.dependencyCount = 1;
3858 rp_ci.pDependencies = &subpass_dependency;
3859
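             // The zero stage masks also violate core checks (VUID ...-03937); allow those so the syncval hazard below is still reached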
paul-lunargb01fd292022-08-24 16:59:08 +02003860 m_errorMonitor->SetAllowedFailureMsg("VUID-VkSubpassDependency-srcStageMask-03937");
3861 m_errorMonitor->SetAllowedFailureMsg("VUID-VkSubpassDependency-dstStageMask-03937");
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003862 vk_testing::RenderPass render_pass;
3863 render_pass.init(*m_device, rp_ci);
3864
3865 VkClearValue clear_value = {};
3866 clear_value.color = {{0, 0, 0, 0}};
3867
3868 VkImageCreateInfo image_ci = LvlInitStruct<VkImageCreateInfo>();
3869 image_ci.imageType = VK_IMAGE_TYPE_2D;
3870 image_ci.format = VK_FORMAT_D32_SFLOAT;
3871 image_ci.extent.width = 32;
3872 image_ci.extent.height = 32;
3873 image_ci.extent.depth = 1;
3874 image_ci.mipLevels = 1;
3875 image_ci.arrayLayers = 1;
3876 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
3877 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
3878 image_ci.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3879
3880 VkImageObj image1(m_device);
3881 image1.init(&image_ci);
3882 ASSERT_TRUE(image1.initialized());
3883
3884 vk_testing::ImageView image_view1;
3885 VkImageViewCreateInfo iv_ci = LvlInitStruct<VkImageViewCreateInfo>();
3886 iv_ci.image = image1.handle();
3887 iv_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
3888 iv_ci.format = VK_FORMAT_D32_SFLOAT;
3889 iv_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
3890 iv_ci.subresourceRange.baseMipLevel = 0;
3891 iv_ci.subresourceRange.levelCount = 1;
3892 iv_ci.subresourceRange.baseArrayLayer = 0;
3893 iv_ci.subresourceRange.layerCount = 1;
3894 image_view1.init(*m_device, iv_ci);
3895
3896 VkImageView framebuffer_attachments[1] = {image_view1.handle()};
3897
3898 auto fb_ci = LvlInitStruct<VkFramebufferCreateInfo>();
3899 fb_ci.renderPass = render_pass.handle();
3900 fb_ci.attachmentCount = 1;
3901 fb_ci.pAttachments = framebuffer_attachments;
3902 fb_ci.width = 32;
3903 fb_ci.height = 32;
3904 fb_ci.layers = 1;
3905
3906 vk_testing::Framebuffer framebuffer;
3907 framebuffer.init(*m_device, fb_ci);
3908
3909 auto rp_bi = LvlInitStruct<VkRenderPassBeginInfo>();
3910 rp_bi.renderPass = render_pass.handle();
3911 rp_bi.framebuffer = framebuffer.handle();
3912 rp_bi.renderArea.extent.width = 32;
3913 rp_bi.renderArea.extent.height = 32;
3914 rp_bi.clearValueCount = 1;
3915 rp_bi.pClearValues = &clear_value;
3916
3917 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
3918 ds_ci.depthTestEnable = VK_FALSE;
3919 ds_ci.depthWriteEnable = VK_FALSE;
3920 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
3921
3922 CreatePipelineHelper pipe(*this);
3923 pipe.InitInfo();
3924 pipe.gp_ci_.renderPass = render_pass.handle();
3925 pipe.gp_ci_.pDepthStencilState = &ds_ci;
3926 pipe.InitState();
3927 ASSERT_VK_SUCCESS(pipe.CreateGraphicsPipeline());
3928
3929 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3930
3931 m_commandBuffer->begin();
3932 m_commandBuffer->BeginRenderPass(rp_bi);
3933 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
3934 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3935 m_commandBuffer->EndRenderPass();
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003936
3937 m_errorMonitor->VerifyFound();
3938}
3939
ziga-lunargc71f1a92022-03-23 23:08:35 +01003940TEST_F(VkSyncValTest, TestCopyingToCompressedImage) {
3941 TEST_DESCRIPTION("Copy from uncompressed to compressed image with and without overlap.");
3942
3943 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3944 bool copy_commands_2 = false;
3945 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME)) {
3946 m_device_extension_names.push_back(VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME);
3947 copy_commands_2 = true;
3948 }
3949 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3950
3951 VkFormatProperties format_properties;
3952 VkFormat mp_format = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
3953 vk::GetPhysicalDeviceFormatProperties(gpu(), mp_format, &format_properties);
3954 if ((format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0) {
3955 printf(
3956 "%s Device does not support VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT for VK_FORMAT_BC1_RGBA_UNORM_BLOCK, skipping test.\n",
3957 kSkipPrefix);
3958 return;
3959 }
3960
3961 VkImageObj src_image(m_device);
3962 src_image.Init(1, 1, 1, VK_FORMAT_R32G32_UINT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_LINEAR);
3963 VkImageObj dst_image(m_device);
3964 dst_image.Init(12, 4, 1, VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_LINEAR);
3965
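             // For copies between uncompressed and compressed images the extent is in source texels, so each 1x1 source texel fills one 4x4 BC1 block in the destination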
3966 VkImageCopy copy_regions[2] = {};
3967 copy_regions[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3968 copy_regions[0].srcSubresource.mipLevel = 0;
3969 copy_regions[0].srcSubresource.baseArrayLayer = 0;
3970 copy_regions[0].srcSubresource.layerCount = 1;
3971 copy_regions[0].srcOffset = {0, 0, 0};
3972 copy_regions[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3973 copy_regions[0].dstSubresource.mipLevel = 0;
3974 copy_regions[0].dstSubresource.baseArrayLayer = 0;
3975 copy_regions[0].dstSubresource.layerCount = 1;
3976 copy_regions[0].dstOffset = {0, 0, 0};
3977 copy_regions[0].extent = {1, 1, 1};
3978 copy_regions[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3979 copy_regions[1].srcSubresource.mipLevel = 0;
3980 copy_regions[1].srcSubresource.baseArrayLayer = 0;
3981 copy_regions[1].srcSubresource.layerCount = 1;
3982 copy_regions[1].srcOffset = {0, 0, 0};
3983 copy_regions[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3984 copy_regions[1].dstSubresource.mipLevel = 0;
3985 copy_regions[1].dstSubresource.baseArrayLayer = 0;
3986 copy_regions[1].dstSubresource.layerCount = 1;
3987 copy_regions[1].dstOffset = {4, 0, 0};
3988 copy_regions[1].extent = {1, 1, 1};
3989
3990 m_commandBuffer->begin();
3991
ziga-lunargc71f1a92022-03-23 23:08:35 +01003992 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3993 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[0]);
3994 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3995 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
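             // Copying into the destination block at x = 4 again overlaps the previous copy's destination: write-after-write hazard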
ziga-lunargc71f1a92022-03-23 23:08:35 +01003996 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
paul-lunargb01fd292022-08-24 16:59:08 +02003997 copy_regions[1].dstOffset = {4, 0, 0};
ziga-lunargc71f1a92022-03-23 23:08:35 +01003998 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3999 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
4000 m_errorMonitor->VerifyFound();
4001
4002 m_commandBuffer->end();
4003
4004 if (copy_commands_2) {
4005 auto vkCmdCopyImage2KHR =
4006 reinterpret_cast<PFN_vkCmdCopyImage2KHR>(vk::GetInstanceProcAddr(instance(), "vkCmdCopyImage2KHR"));
4007 assert(vkCmdCopyImage2KHR != nullptr);
4008
4009 m_commandBuffer->reset();
4010
4011 VkImageCopy2KHR copy_regions2[2];
4012 copy_regions2[0] = LvlInitStruct<VkImageCopy2KHR>();
4013 copy_regions2[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4014 copy_regions2[0].srcSubresource.mipLevel = 0;
4015 copy_regions2[0].srcSubresource.baseArrayLayer = 0;
4016 copy_regions2[0].srcSubresource.layerCount = 1;
4017 copy_regions2[0].srcOffset = {0, 0, 0};
4018 copy_regions2[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4019 copy_regions2[0].dstSubresource.mipLevel = 0;
4020 copy_regions2[0].dstSubresource.baseArrayLayer = 0;
4021 copy_regions2[0].dstSubresource.layerCount = 1;
4022 copy_regions2[0].dstOffset = {0, 0, 0};
4023 copy_regions2[0].extent = {1, 1, 1};
4024 copy_regions2[1] = LvlInitStruct<VkImageCopy2KHR>();
4025 copy_regions2[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4026 copy_regions2[1].srcSubresource.mipLevel = 0;
4027 copy_regions2[1].srcSubresource.baseArrayLayer = 0;
4028 copy_regions2[1].srcSubresource.layerCount = 1;
4029 copy_regions2[1].srcOffset = {0, 0, 0};
4030 copy_regions2[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4031 copy_regions2[1].dstSubresource.mipLevel = 0;
4032 copy_regions2[1].dstSubresource.baseArrayLayer = 0;
4033 copy_regions2[1].dstSubresource.layerCount = 1;
4034 copy_regions2[1].dstOffset = {4, 0, 0};
4035 copy_regions2[1].extent = {1, 1, 1};
4036
4037 auto copy_image_info = LvlInitStruct<VkCopyImageInfo2KHR>();
4038 copy_image_info.srcImage = src_image.handle();
4039 copy_image_info.srcImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4040 copy_image_info.dstImage = dst_image.handle();
4041 copy_image_info.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4042 copy_image_info.regionCount = 2;
4043 copy_image_info.pRegions = copy_regions2;
4044
4045 m_commandBuffer->begin();
4046
ziga-lunargc71f1a92022-03-23 23:08:35 +01004047 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
ziga-lunargc71f1a92022-03-23 23:08:35 +01004048 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4049 copy_image_info.regionCount = 1;
4050 copy_image_info.pRegions = &copy_regions2[1];
        copy_regions2[1].dstOffset = {4, 0, 0};  // unchanged: the same destination block as the first vkCmdCopyImage2KHR call -> WAW
4052 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
4053 m_errorMonitor->VerifyFound();
4054
4055 m_commandBuffer->end();
4056 }
4057}
John Zulaufd79e34f2022-04-20 16:39:59 -06004058
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004059TEST_F(VkSyncValTest, StageAccessExpansion) {
4060 SetTargetApiVersion(VK_API_VERSION_1_2);
4061
4062 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
4063 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4064 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
4065
4066 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
4067 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4068 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4069 VkImageObj image_c_a(m_device), image_c_b(m_device);
4070 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
4071 image_c_a.Init(image_c_ci);
4072 image_c_b.Init(image_c_ci);
4073
4074 VkImageView imageview_c = image_c_a.targetView(format);
4075 VkImageUsageFlags image_usage_storage =
4076 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4077 VkImageObj image_s_a(m_device), image_s_b(m_device);
4078 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
4079 image_s_a.Init(image_s_ci);
4080 image_s_b.Init(image_s_ci);
4081 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4082 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4083
4084 VkImageView imageview_s = image_s_a.targetView(format);
4085
4086 vk_testing::Sampler sampler_s, sampler_c;
4087 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
4088 sampler_s.init(*m_device, sampler_ci);
4089 sampler_c.init(*m_device, sampler_ci);
4090
4091 VkBufferObj buffer_a, buffer_b;
4092 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4093 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
4094 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4095 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
4096 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
4097
4098 vk_testing::BufferView bufferview;
4099 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
4100 bvci.buffer = buffer_a.handle();
4101 bvci.format = VK_FORMAT_R32_SFLOAT;
4102 bvci.offset = 0;
4103 bvci.range = VK_WHOLE_SIZE;
4104
4105 bufferview.init(*m_device, bvci);
4106
4107 OneOffDescriptorSet descriptor_set(m_device,
4108 {
4109 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4110 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
4111 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
4112 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4113 });
4114
4115 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
4116 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
4117 VK_IMAGE_LAYOUT_GENERAL);
4118 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
4119 VK_IMAGE_LAYOUT_GENERAL);
4120 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
4121 descriptor_set.UpdateDescriptorSets();
4122
    // Shader source reading all four descriptor bindings (used below as the fragment shader for the draw)
    std::string fsSource = R"glsl(
4125 #version 450
4126 layout(set=0, binding=0) uniform foo { float x; } ub0;
4127 layout(set=0, binding=1) uniform sampler2D cis1;
4128 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
4129 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
4130 void main(){
4131 vec4 vColor4;
4132 vColor4.x = ub0.x;
4133 vColor4 = texture(cis1, vec2(0));
4134 vColor4 = imageLoad(si2, ivec2(0));
4135 vColor4 = imageLoad(stb3, 0);
4136 }
4137 )glsl";
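    // The shader reads all four bindings; the two image reads (the combined image sampler view of image_c_a and the
    // storage image view of image_s_a) are the accesses that collide with the transfer writes recorded below.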
4138
4139 // Draw
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004140 const float vbo_data[3] = {1.f, 0.f, 1.f};
4141 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
4142 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
4143 VkBufferObj vbo, vbo2;
4144 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4145 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4146 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4147
4148 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
    VkShaderObj fs(this, fsSource.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004150
4151 CreatePipelineHelper g_pipe(*this);
4152 g_pipe.InitInfo();
4153 g_pipe.InitState();
4154 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
4155 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
4156 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
4157 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
4158 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
4159 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
4160 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
4161
4162 m_commandBuffer->reset();
4163 m_commandBuffer->begin();
4164 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4165 VkOffset3D zero_offset{0, 0, 0};
4166 VkExtent3D full_extent{16, 16, 1};
4167 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
4168 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4169 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4170 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4171 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4172
4173 auto barrier = LvlInitStruct<VkMemoryBarrier>();
4174 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4175 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
4176
4177 // wrong: dst stage should be VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
4178 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
4179 &barrier, 0, nullptr, 0, nullptr);
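    // This is the stage/access expansion the test is named for: VK_ACCESS_SHADER_READ_BIT only expands over the
    // stages in the dst stage mask, and with only VERTEX_SHADER there, the fragment-shader descriptor reads made by
    // the draw below remain unprotected and should be reported as READ_AFTER_WRITE.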
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004180
4181 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4182 VkDeviceSize offset = 0;
4183 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4184
4185 VkViewport viewport = {0, 0, 16, 16, 0, 1};
4186 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4187 VkRect2D scissor = {{0, 0}, {16, 16}};
4188 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4189
4190 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4191 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4192 &descriptor_set.set_, 0, nullptr);
4193
4194 // one error for each image copied above
4195 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4196 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4197 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4198 m_errorMonitor->VerifyFound();
4199
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004200 m_commandBuffer->EndRenderPass();
4201 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004202
4203 // Try again with the correct dst stage on the barrier
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004204 m_commandBuffer->reset();
4205 m_commandBuffer->begin();
4206 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4207 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4208 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4209 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4210
4211 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 1,
4212 &barrier, 0, nullptr, 0, nullptr);
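    // With FRAGMENT_SHADER in the dst stage mask, SHADER_READ now covers the fragment-shader descriptor reads, so the
    // same draw should record without a hazard.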
4213
4214 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4215 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4216
4217 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4218 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4219
4220 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4221 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4222 &descriptor_set.set_, 0, nullptr);
4223 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4224 m_commandBuffer->EndRenderPass();
4225 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004226}
4227
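// QSTestContext bundles the recurring fixtures for the queue submit (QS) synchronization tests below: two
// transfer-capable queues from the same family (q0/q1), three 256-byte buffers, three command buffers from a shared
// pool, plus a semaphore and an event. A typical test records simple copies and replays them with different
// synchronization, roughly:
//
//     QSTestContext test(m_device);
//     test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);       // read a, write b
//     test.RecordCopy(test.cbb, test.buffer_c, test.buffer_a);       // read c, write a
//     test.Submit0Signal(test.cba);                                  // signal the context's semaphore on q0
//     test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);    // wait on q1 with transfers in the second scope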
John Zulaufb66ee052022-06-10 16:52:28 -06004228struct QSTestContext {
John Zulaufc55f4702022-07-15 12:16:34 -06004229 VkDeviceObj* dev;
John Zulaufb66ee052022-06-10 16:52:28 -06004230 uint32_t q_fam = ~0U;
John Zulauf6df2d5c2022-05-28 13:02:21 -06004231 VkQueue q0 = VK_NULL_HANDLE;
4232 VkQueue q1 = VK_NULL_HANDLE;
John Zulaufb66ee052022-06-10 16:52:28 -06004233
4234 VkBufferObj buffer_a;
4235 VkBufferObj buffer_b;
4236 VkBufferObj buffer_c;
4237
4238 VkBufferCopy region;
4239 VkCommandPoolObj pool;
4240
4241 VkCommandBufferObj cba;
4242 VkCommandBufferObj cbb;
4243 VkCommandBufferObj cbc;
4244
4245 VkCommandBuffer h_cba = VK_NULL_HANDLE;
4246 VkCommandBuffer h_cbb = VK_NULL_HANDLE;
4247 VkCommandBuffer h_cbc = VK_NULL_HANDLE;
4248
4249 vk_testing::Semaphore semaphore;
4250 vk_testing::Event event;
4251
4252 VkCommandBufferObj* current_cb = nullptr;
4253
John Zulaufc55f4702022-07-15 12:16:34 -06004254 QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0 = nullptr, VkQueueObj* force_q1 = nullptr);
John Zulaufaa7ee262022-08-01 18:10:28 -06004255 VkCommandBuffer InitFromPool(VkCommandBufferObj& cb_obj);
John Zulaufb66ee052022-06-10 16:52:28 -06004256 bool Valid() const { return q1 != VK_NULL_HANDLE; }
4257
4258 void Begin(VkCommandBufferObj& cb);
4259 void BeginA() { Begin(cba); }
4260 void BeginB() { Begin(cbb); }
4261 void BeginC() { Begin(cbc); }
4262
4263 void End();
4264
John Zulauf169d0a02022-08-19 14:18:00 -06004265 void Copy(VkBufferObj& from, VkBufferObj& to) {
4266 vk::CmdCopyBuffer(current_cb->handle(), from.handle(), to.handle(), 1, &region);
4267 }
4268 void CopyAToB() { Copy(buffer_a, buffer_b); }
4269 void CopyAToC() { Copy(buffer_a, buffer_c); }
John Zulaufb66ee052022-06-10 16:52:28 -06004270
John Zulauf169d0a02022-08-19 14:18:00 -06004271 void CopyBToA() { Copy(buffer_b, buffer_a); }
4272 void CopyBToC() { Copy(buffer_b, buffer_c); }
John Zulaufb66ee052022-06-10 16:52:28 -06004273
John Zulauf169d0a02022-08-19 14:18:00 -06004274 void CopyCToA() { Copy(buffer_c, buffer_a); }
4275 void CopyCToB() { Copy(buffer_c, buffer_b); }
John Zulaufb66ee052022-06-10 16:52:28 -06004276
John Zulauf46f5d6b2022-06-30 12:38:34 -06004277 void CopyGeneral(const VkImageObj& from, const VkImageObj& to, const VkImageCopy& region) {
4278 vk::CmdCopyImage(current_cb->handle(), from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
4279 &region);
4280 };
4281
John Zulaufb66ee052022-06-10 16:52:28 -06004282 VkBufferMemoryBarrier InitBufferBarrier(const VkBufferObj& buffer);
4283 void TransferBarrier(const VkBufferObj& buffer);
4284 void TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier);
4285
John Zulaufc55f4702022-07-15 12:16:34 -06004286 void Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004287 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4288 VkFence fence = VK_NULL_HANDLE);
John Zulaufb66ee052022-06-10 16:52:28 -06004289
    // SubmitX wraps vkQueueSubmit2; the numeric suffixes are taken by the per-queue helpers, so "X" (eXtension) it is
4291 void SubmitX(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
4292 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4293 VkPipelineStageFlags signal_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkFence fence = VK_NULL_HANDLE);
4294
John Zulaufc55f4702022-07-15 12:16:34 -06004295 void Submit0(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004296 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4297 VkFence fence = VK_NULL_HANDLE) {
4298 Submit(q0, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004299 }
4300 void Submit0Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit0(cb, semaphore.handle(), wait_mask); }
4301 void Submit0Signal(VkCommandBufferObj& cb) { Submit0(cb, VK_NULL_HANDLE, 0U, semaphore.handle()); }
4302
John Zulaufc55f4702022-07-15 12:16:34 -06004303 void Submit1(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004304 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4305 VkFence fence = VK_NULL_HANDLE) {
4306 Submit(q1, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004307 }
4308 void Submit1Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit1(cb, semaphore.handle(), wait_mask); }
    void Submit1Signal(VkCommandBufferObj& cb, VkPipelineStageFlags signal_mask) {
        (void)signal_mask;  // unused: VkSubmitInfo has no per-signal stage mask (see SubmitX for the VkSubmitInfo2 path)
        Submit1(cb, VK_NULL_HANDLE, 0U, semaphore.handle());
    }
4312 void SetEvent(VkPipelineStageFlags src_mask) { event.cmd_set(*current_cb, src_mask); }
4313 void WaitEventBufferTransfer(VkBufferObj& buffer, VkPipelineStageFlags src_mask, VkPipelineStageFlags dst_mask) {
4314 std::vector<VkBufferMemoryBarrier> buffer_barriers(1, InitBufferBarrier(buffer));
4315 event.cmd_wait(*current_cb, src_mask, dst_mask, std::vector<VkMemoryBarrier>(), buffer_barriers,
4316 std::vector<VkImageMemoryBarrier>());
4317 }
John Zulaufc55f4702022-07-15 12:16:34 -06004318 void QueueWait(VkQueue q) { vk::QueueWaitIdle(q); }
4319 void QueueWait0() { QueueWait(q0); }
4320 void QueueWait1() { QueueWait(q1); }
4321 void DeviceWait() { vk::DeviceWaitIdle(dev->handle()); }
John Zulauf169d0a02022-08-19 14:18:00 -06004322
4323 void RecordCopy(VkCommandBufferObj& cb, VkBufferObj& from, VkBufferObj& to);
John Zulaufb66ee052022-06-10 16:52:28 -06004324};
4325
John Zulaufc55f4702022-07-15 12:16:34 -06004326QSTestContext::QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0, VkQueueObj* force_q1)
4327 : dev(device), q0(VK_NULL_HANDLE), q1(VK_NULL_HANDLE) {
4328 if (force_q0) {
4329 q0 = force_q0->handle();
4330 q_fam = force_q0->get_family_index();
4331 if (force_q1) {
            // The object assumes the queues are from the same family, so enforce this here
4333 if (force_q1->get_family_index() == q_fam) {
4334 q1 = force_q1->handle();
4335 }
4336 } else {
4337 q1 = q0; // Allow the two queues to be the same and valid if forced
4338 }
4339 } else {
4340 const auto& queues = device->dma_queues();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004341
John Zulaufc55f4702022-07-15 12:16:34 -06004342 const uint32_t q_count = static_cast<uint32_t>(queues.size());
4343 for (uint32_t q0_index = 0; q0_index < q_count; ++q0_index) {
4344 const auto* q0_entry = queues[q0_index];
4345 q0 = q0_entry->handle();
4346 q_fam = q0_entry->get_family_index();
4347 for (uint32_t q1_index = (q0_index + 1); q1_index < q_count; ++q1_index) {
4348 const auto* q1_entry = queues[q1_index];
4349 if (q_fam == q1_entry->get_family_index()) {
4350 q1 = q1_entry->handle();
4351 break;
4352 }
4353 }
4354 if (Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004355 break;
4356 }
4357 }
John Zulauf6df2d5c2022-05-28 13:02:21 -06004358 }
John Zulaufc55f4702022-07-15 12:16:34 -06004359
John Zulaufb66ee052022-06-10 16:52:28 -06004360 if (!Valid()) return;
4361
4362 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4363 buffer_a.init_as_src_and_dst(*device, 256, mem_prop);
4364 buffer_b.init_as_src_and_dst(*device, 256, mem_prop);
4365 buffer_c.init_as_src_and_dst(*device, 256, mem_prop);
4366
4367 region = {0, 0, 256};
4368
4369 pool.Init(device, q_fam, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004370
John Zulaufaa7ee262022-08-01 18:10:28 -06004371 h_cba = InitFromPool(cba);
4372 h_cbb = InitFromPool(cbb);
4373 h_cbc = InitFromPool(cbc);
John Zulaufb66ee052022-06-10 16:52:28 -06004374
4375 auto semaphore_ci = LvlInitStruct<VkSemaphoreCreateInfo>();
4376 semaphore.init(*device, semaphore_ci);
4377
4378 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
4379 event.init(*device, eci);
4380}
4381
John Zulaufaa7ee262022-08-01 18:10:28 -06004382VkCommandBuffer QSTestContext::InitFromPool(VkCommandBufferObj& cb_obj) {
4383 cb_obj.Init(dev, &pool);
4384 return cb_obj.handle();
4385}
4386
John Zulaufb66ee052022-06-10 16:52:28 -06004387void QSTestContext::Begin(VkCommandBufferObj& cb) {
John Zulaufc55f4702022-07-15 12:16:34 -06004388 VkCommandBufferBeginInfo info = LvlInitStruct<VkCommandBufferBeginInfo>();
4389 info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4390 info.pInheritanceInfo = nullptr;
4391
John Zulaufb66ee052022-06-10 16:52:28 -06004392 cb.reset();
John Zulaufc55f4702022-07-15 12:16:34 -06004393 cb.begin(&info);
John Zulaufb66ee052022-06-10 16:52:28 -06004394 current_cb = &cb;
4395}
4396
4397void QSTestContext::End() {
4398 current_cb->end();
4399 current_cb = nullptr;
4400}
4401
4402VkBufferMemoryBarrier QSTestContext::InitBufferBarrier(const VkBufferObj& buffer) {
4403 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
4404 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
4405 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4406 buffer_barrier.buffer = buffer.handle();
4407 buffer_barrier.offset = 0;
4408 buffer_barrier.size = 256;
4409 return buffer_barrier;
4410}
4411
4412void QSTestContext::TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier) {
4413 vk::CmdPipelineBarrier(current_cb->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1,
4414 &buffer_barrier, 0, nullptr);
4415}
4416
4417void QSTestContext::TransferBarrier(const VkBufferObj& buffer) { TransferBarrier(InitBufferBarrier(buffer)); }
4418
John Zulaufaa7ee262022-08-01 18:10:28 -06004419void QSTestContext::Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait, VkPipelineStageFlags wait_mask, VkSemaphore signal,
4420 VkFence fence) {
John Zulaufb66ee052022-06-10 16:52:28 -06004421 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4422 submit1.commandBufferCount = 1;
4423 VkCommandBuffer h_cb = cb.handle();
4424 submit1.pCommandBuffers = &h_cb;
4425 if (wait != VK_NULL_HANDLE) {
4426 submit1.waitSemaphoreCount = 1;
4427 submit1.pWaitSemaphores = &wait;
4428 submit1.pWaitDstStageMask = &wait_mask;
4429 }
4430 if (signal != VK_NULL_HANDLE) {
4431 submit1.signalSemaphoreCount = 1;
4432 submit1.pSignalSemaphores = &signal;
4433 }
John Zulaufaa7ee262022-08-01 18:10:28 -06004434 vk::QueueSubmit(q, 1, &submit1, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004435}
4436
John Zulauf169d0a02022-08-19 14:18:00 -06004437void QSTestContext::SubmitX(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait, VkPipelineStageFlags wait_mask, VkSemaphore signal,
4438 VkPipelineStageFlags signal_mask, VkFence fence) {
4439 auto submit1 = lvl_init_struct<VkSubmitInfo2>();
4440 auto cb_info = lvl_init_struct<VkCommandBufferSubmitInfo>();
4441 auto wait_info = lvl_init_struct<VkSemaphoreSubmitInfo>();
4442 auto signal_info = lvl_init_struct<VkSemaphoreSubmitInfo>();
4443
4444 cb_info.commandBuffer = cb.handle();
4445 submit1.commandBufferInfoCount = 1;
4446 submit1.pCommandBufferInfos = &cb_info;
4447
4448 if (wait != VK_NULL_HANDLE) {
4449 wait_info.semaphore = wait;
4450 wait_info.stageMask = wait_mask;
4451 submit1.waitSemaphoreInfoCount = 1;
4452 submit1.pWaitSemaphoreInfos = &wait_info;
4453 }
4454 if (signal != VK_NULL_HANDLE) {
4455 signal_info.semaphore = signal;
4456 signal_info.stageMask = signal_mask;
4457 submit1.signalSemaphoreInfoCount = 1;
4458 submit1.pSignalSemaphoreInfos = &signal_info;
4459 }
4460
4461 vk::QueueSubmit2(q, 1, &submit1, fence);
4462}
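// Unlike Submit(), which uses VkSubmitInfo and can only scope the wait (pWaitDstStageMask), SubmitX uses
// VkSubmitInfo2, where each VkSemaphoreSubmitInfo carries its own stage mask, so the signal operation's first scope
// (signal_mask) can be constrained as well.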
4463
4464inline void QSTestContext::RecordCopy(VkCommandBufferObj& cb, VkBufferObj& from, VkBufferObj& to) {
4465 Begin(cb);
4466 Copy(from, to);
4467 End();
4468}
4469
John Zulaufb9fad9f2022-07-15 11:10:37 -06004470TEST_F(VkSyncValTest, SyncQSBufferCopyHazards) {
4471 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4472 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4473
John Zulauf4e3bb772022-08-18 16:58:56 -06004474 QSTestContext test(m_device, m_device->m_queue_obj);
4475 if (!test.Valid()) {
4476 GTEST_SKIP() << "Test requires a valid queue object.";
4477 }
John Zulaufb9fad9f2022-07-15 11:10:37 -06004478
John Zulauf169d0a02022-08-19 14:18:00 -06004479 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
4480 test.RecordCopy(test.cbb, test.buffer_c, test.buffer_a);
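    // cba reads buffer_a while cbb writes it; with no barrier or semaphore between them, any submission ordering that
    // places cbb after cba is a WRITE_AFTER_READ hazard, which each of the submission shapes below should report.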
John Zulaufb9fad9f2022-07-15 11:10:37 -06004481
4482 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4483 submit1.commandBufferCount = 2;
John Zulauf4e3bb772022-08-18 16:58:56 -06004484 VkCommandBuffer two_cbs[2] = {test.h_cba, test.h_cbb};
John Zulaufb9fad9f2022-07-15 11:10:37 -06004485 submit1.pCommandBuffers = two_cbs;
4486
4487 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulauf4e3bb772022-08-18 16:58:56 -06004488 vk::QueueSubmit(test.q0, 1, &submit1, VK_NULL_HANDLE);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004489 m_errorMonitor->VerifyFound();
4490
John Zulauf4e3bb772022-08-18 16:58:56 -06004491 test.DeviceWait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004492
4493 VkSubmitInfo submit2[2] = {lvl_init_struct<VkSubmitInfo>(), lvl_init_struct<VkSubmitInfo>()};
4494 submit2[0].commandBufferCount = 1;
John Zulauf4e3bb772022-08-18 16:58:56 -06004495 submit2[0].pCommandBuffers = &test.h_cba;
John Zulaufb9fad9f2022-07-15 11:10:37 -06004496 submit2[1].commandBufferCount = 1;
John Zulauf4e3bb772022-08-18 16:58:56 -06004497 submit2[1].pCommandBuffers = &test.h_cbb;
John Zulaufb9fad9f2022-07-15 11:10:37 -06004498 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulauf4e3bb772022-08-18 16:58:56 -06004499 vk::QueueSubmit(test.q0, 2, submit2, VK_NULL_HANDLE);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004500 m_errorMonitor->VerifyFound();
4501
    // With the skip settings, the above QueueSubmits didn't record, so we can treat the global queue contexts as empty
John Zulauf4e3bb772022-08-18 16:58:56 -06004503 test.Submit0(test.cba);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004504
John Zulaufb9fad9f2022-07-15 11:10:37 -06004505 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulauf4e3bb772022-08-18 16:58:56 -06004506 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004507 m_errorMonitor->VerifyFound();
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004508
John Zulauf4e3bb772022-08-18 16:58:56 -06004509 test.DeviceWait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004510}
4511
John Zulauf169d0a02022-08-19 14:18:00 -06004512TEST_F(VkSyncValTest, SyncQSSubmit2) {
4513 SetTargetApiVersion(VK_API_VERSION_1_3);
4514 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4515 if (DeviceValidationVersion() < VK_API_VERSION_1_3) {
4516 GTEST_SKIP() << "At least Vulkan version 1.3 is required";
4517 }
paul-lunargb01fd292022-08-24 16:59:08 +02004518 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
4519 m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
4520 } else {
4521 GTEST_SKIP() << "Synchronization2 not supported";
4522 }
John Zulauf169d0a02022-08-19 14:18:00 -06004523
paul-lunargb01fd292022-08-24 16:59:08 +02004524 if (!CheckSynchronization2SupportAndInitState(this)) {
4525 GTEST_SKIP() << "Synchronization2 not supported";
4526 }
John Zulauf169d0a02022-08-19 14:18:00 -06004527
4528 QSTestContext test(m_device, m_device->m_queue_obj);
4529 if (!test.Valid()) {
4530 GTEST_SKIP() << "Test requires a valid queue object.";
4531 }
4532
4533 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
4534 test.RecordCopy(test.cbb, test.buffer_c, test.buffer_a);
4535
4536 // Test that the signal mask is controlling the first scope
4537 test.SubmitX(test.q0, test.cba, VK_NULL_HANDLE, 0, test.semaphore.handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
4538 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    test.Submit0Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);  // waits on the semaphore, but its signal scope (TOP) doesn't cover the copy in cba
4540 m_errorMonitor->VerifyFound();
4541
    // Since the last submit was skipped, we need a wait that will succeed
4543 test.BeginC();
4544 test.End();
4545 test.Submit0Wait(test.cbc, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
4546 test.DeviceWait();
4547
4548 // This time with the correct first sync scope.
4549 test.SubmitX(test.q0, test.cba, VK_NULL_HANDLE, 0, test.semaphore.handle(), VK_PIPELINE_STAGE_TRANSFER_BIT);
4550 test.Submit0Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);
4551
4552 test.DeviceWait();
4553}
4554
John Zulaufb9fad9f2022-07-15 11:10:37 -06004555TEST_F(VkSyncValTest, SyncQSBufferCopyVsIdle) {
John Zulaufb9fad9f2022-07-15 11:10:37 -06004556 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4557 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4558
John Zulaufc55f4702022-07-15 12:16:34 -06004559 QSTestContext test(m_device, m_device->m_queue_obj);
4560 if (!test.Valid()) {
4561 GTEST_SKIP() << "Test requires a valid queue object.";
4562 }
John Zulaufb9fad9f2022-07-15 11:10:37 -06004563
John Zulauf169d0a02022-08-19 14:18:00 -06004564 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
4565 test.RecordCopy(test.cbb, test.buffer_c, test.buffer_a);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004566
4567 // Submit A
John Zulaufc55f4702022-07-15 12:16:34 -06004568 test.Submit0(test.cba);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004569
4570 // Submit B which hazards vs. A
John Zulaufb9fad9f2022-07-15 11:10:37 -06004571 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004572 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004573 m_errorMonitor->VerifyFound();
4574
    // With the skip settings, the above QueueSubmit didn't record, so we can treat the previous submit as not having
    // happened. Submit B again, but after a device wait idle, which should remove the hazard.
John Zulaufc55f4702022-07-15 12:16:34 -06004578 test.DeviceWait();
4579 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004580
John Zulaufc55f4702022-07-15 12:16:34 -06004581 // Submit the same command again for another hazard
John Zulaufb9fad9f2022-07-15 11:10:37 -06004582 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulaufc55f4702022-07-15 12:16:34 -06004583 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004584 m_errorMonitor->VerifyFound();
    // With the skip settings, the above QueueSubmit didn't record, so we can treat the previous submit as not having
    // happened. Submit B again, but after a queue wait idle, which should remove the hazard.
4588 // Submit B again, but after idling, which should remove the hazard
John Zulaufc55f4702022-07-15 12:16:34 -06004589 test.QueueWait0();
4590 test.Submit0(test.cbb);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004591
4592 m_device->wait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004593}
4594
John Zulaufaa7ee262022-08-01 18:10:28 -06004595TEST_F(VkSyncValTest, SyncQSBufferCopyVsFence) {
4596 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4597 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4598
4599 QSTestContext test(m_device, m_device->m_queue_obj);
4600 if (!test.Valid()) {
4601 GTEST_SKIP() << "Test requires a valid queue object.";
4602 }
4603
4604 vk_testing::Fence fence;
4605 fence.init(*m_device, VkFenceObj::create_info());
4606 VkFence fence_handle = fence.handle();
4607 VkResult wait_result;
4608 VkCommandBufferObj cbd;
4609 test.InitFromPool(cbd);
4610
4611 // Set up four CB with copy commands
4612 // We'll wait for the first, but not the second
John Zulauf169d0a02022-08-19 14:18:00 -06004613 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
4614 test.RecordCopy(test.cbb, test.buffer_a, test.buffer_c);
4615 test.RecordCopy(test.cbc, test.buffer_a, test.buffer_b);
John Zulaufaa7ee262022-08-01 18:10:28 -06004616
4617 // This is the one that should error
John Zulauf169d0a02022-08-19 14:18:00 -06004618 test.RecordCopy(cbd, test.buffer_a, test.buffer_c);
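    // cba and cbc both write buffer_b, while cbb and cbd both write buffer_c. The fence below is only waited for the
    // cba submission, so resubmitting cbc is ordered against cba, but cbd still collides with the un-waited cbb.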
John Zulaufaa7ee262022-08-01 18:10:28 -06004619
    // Two copies *better* finish in a second...
    const uint64_t kOneSecond = 1U << 30;  // fence timeout in nanoseconds (~1.07s)
4622 // Copy A to B
4623 test.Submit0(test.cba, VK_NULL_HANDLE, 0U, VK_NULL_HANDLE, fence_handle);
4624 // Copy A to C
4625 test.Submit0(test.cbb);
4626 // Wait for A to B
    wait_result = fence.wait(kOneSecond);
4628
    if (wait_result != VK_SUCCESS) {
        ADD_FAILURE() << "Fence wait failed. Aborting test.";
        m_device->wait();
        return;  // actually abort, rather than generating spurious hazards below
    }
4633
4634 // A and B should be good to go...
4635 test.Submit0(test.cbc);
4636
4637 // But C shouldn't
4638 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4639 test.Submit0(cbd);
4640 m_errorMonitor->VerifyFound();
4641
4642 test.DeviceWait();
4643}
4644
John Zulaufb66ee052022-06-10 16:52:28 -06004645TEST_F(VkSyncValTest, SyncQSBufferCopyQSORules) {
4646 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4647 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4648
4649 QSTestContext test(m_device);
4650 if (!test.Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004651 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4652 return;
4653 }
4654
    // Command Buffer A reads from buffer A and writes to buffer B
John Zulauf169d0a02022-08-19 14:18:00 -06004656 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004657
    // Command Buffer B reads from buffer C and writes to buffer A, but has a barrier to protect the write to A when
    // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
John Zulaufb66ee052022-06-10 16:52:28 -06004660 test.BeginB();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004661
    // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
    // correct semaphore operations between queues.
John Zulaufb66ee052022-06-10 16:52:28 -06004664 test.TransferBarrier(test.buffer_a);
4665 test.CopyCToA();
4666 test.End();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004667
John Zulaufd060c3f2022-06-08 16:00:46 -06004668 // Command Buffer C does the same copy as B but without the barrier.
John Zulauf169d0a02022-08-19 14:18:00 -06004669 test.RecordCopy(test.cbc, test.buffer_c, test.buffer_a);
John Zulaufd060c3f2022-06-08 16:00:46 -06004670
    // Submit A and B on the same queue, to assure us the barrier *would* be sufficient given QSO
    // This is included as a "success" case, just to verify CBA and CBB are set up correctly.
John Zulaufb66ee052022-06-10 16:52:28 -06004673 test.Submit0(test.cba);
4674 test.Submit0(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004675 m_device->wait(); // DeviceWaitIdle, clearing the field for the next subcase
John Zulauf6df2d5c2022-05-28 13:02:21 -06004676
    // Submit A and B on different queues. Since no semaphore is used between the queues, CB B hazards asynchronously
    // with CB A, with buffer A being read and written on independent queues.
4679 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004680 test.Submit0(test.cba);
4681 test.Submit1(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004682 m_errorMonitor->VerifyFound();
4683
4684 // Set up the semaphore for the next two cases
John Zulauf6df2d5c2022-05-28 13:02:21 -06004685
4686 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004687
    // Submit A and B on different queues, with an ineffectual semaphore. The wait mask is BOTTOM_OF_PIPE, thus nothing
    // in CB B is in the second execution scope of the waited signal.
4690 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004691 test.Submit0Signal(test.cba);
John Zulaufc55f4702022-07-15 12:16:34 -06004692 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT); // wait mask is BOTTOM, s.t. this is a wait-for-nothing.
John Zulauf6df2d5c2022-05-28 13:02:21 -06004693 m_errorMonitor->VerifyFound();
4694
    // Since the second submit failed, it was skipped. So we can try again, without having to WaitDeviceIdle
John Zulaufb66ee052022-06-10 16:52:28 -06004696 // Include transfers in the second execution scope of the waited signal, s.t. the PipelineBarrier in CB B can chain with it.
4697 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT); //
John Zulaufd060c3f2022-06-08 16:00:46 -06004698
4699 m_device->wait();
4700
    // Submit A and then C to verify the second access scope of the signal
John Zulaufb66ee052022-06-10 16:52:28 -06004702 test.Submit0Signal(test.cba);
4703 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulaufd060c3f2022-06-08 16:00:46 -06004704
4705 m_device->wait();
4706
4707 // ... and again on the same queue
John Zulaufb66ee052022-06-10 16:52:28 -06004708 test.Submit0Signal(test.cba);
4709 test.Submit0Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004710
4711 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004712}
John Zulaufb66ee052022-06-10 16:52:28 -06004713
4714TEST_F(VkSyncValTest, SyncQSBufferEvents) {
4715 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4716 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4717
4718 QSTestContext test(m_device);
4719 if (!test.Valid()) {
4720 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4721 return;
4722 }
4723
    // Command Buffer A reads from buffer A and writes to buffer B
4725 test.BeginA();
4726 test.CopyAToB();
4727 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4728 test.End();
4729
    // Command Buffer B reads from buffer C and writes to buffer A, but has an event wait to protect the write to A when
    // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
4732 test.BeginB();
4733
    // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
    // correct semaphore operations between queues.
4736 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4737 test.CopyCToA();
4738 test.End();
4739
    // Command Buffer C merges the operations from A and B, to ensure the set/wait is correct:
    //   reads from buffer A and writes to buffer B,
    //   then reads from buffer C and writes to buffer A, with the set/wait protecting the write to A.
4743 test.BeginC();
4744 test.CopyAToB();
4745 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4746 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4747 test.CopyCToA();
4748 test.End();
4749
4750 test.Submit0(test.cba);
4751 test.Submit0(test.cbb);
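    // Success case: on a single queue the set in cba precedes the wait in cbb in queue submission order, so the event
    // wait protects cbb's write to buffer_a.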
4752
4753 // Ensure that the wait doesn't apply to async queues
4754 m_device->wait();
4755 test.Submit0(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004756 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
4757 test.Submit1(test.cbb);
4758 m_errorMonitor->VerifyFound();
4759
4760 // Ensure that the wait doesn't apply to access on other synchronized queues
John Zulaufb66ee052022-06-10 16:52:28 -06004761 m_device->wait();
4762
4763 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004764 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004765 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004766 m_errorMonitor->VerifyFound();
4767
    // Need to have a successful signal wait to get the semaphore into a usable state.
John Zulaufb66ee052022-06-10 16:52:28 -06004769 test.BeginC();
4770 test.End();
John Zulaufc55f4702022-07-15 12:16:34 -06004771 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004772 m_device->wait();
4773
4774 // Next ensure that accesses from other queues aren't included in the first scope
John Zulauf169d0a02022-08-19 14:18:00 -06004775 test.RecordCopy(test.cba, test.buffer_a, test.buffer_b);
John Zulaufb66ee052022-06-10 16:52:28 -06004776
4777 test.BeginB();
4778 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4779 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4780 test.CopyCToA();
4781 test.End();
4782
4783 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004784 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004785 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004786 m_errorMonitor->VerifyFound();
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004787
4788 m_device->wait();
John Zulaufb66ee052022-06-10 16:52:28 -06004789}
John Zulauf46f5d6b2022-06-30 12:38:34 -06004790
4791TEST_F(VkSyncValTest, SyncQSOBarrierHazard) {
4792 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4793 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4794
4795 QSTestContext test(m_device);
4796 if (!test.Valid()) {
4797 GTEST_SKIP() << "Test requires at least 2 TRANSFER capable queues in the same queue_family.";
4798 }
4799
4800 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4801 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4802 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
4803
4804 VkImageObj image_a(m_device);
4805 image_a.Init(image_ci);
4806 ASSERT_TRUE(image_a.initialized());
4807 image_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4808
4809 VkImageObj image_b(m_device);
4810 image_b.Init(image_ci);
4811 ASSERT_TRUE(image_b.initialized());
4812 image_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4813
4814 VkImageSubresourceLayers all_layers{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4815 VkOffset3D zero_offset{0, 0, 0};
4816 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
4817 VkImageCopy full_region = {all_layers, zero_offset, all_layers, zero_offset, full_extent};
4818
4819 test.BeginA();
4820 test.CopyGeneral(image_a, image_b, full_region);
4821 test.End();
4822
4823 test.BeginB();
4824 image_a.ImageMemoryBarrier(test.current_cb, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_NONE, VK_ACCESS_NONE,
John Zulaufc55f4702022-07-15 12:16:34 -06004825 VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
4826 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004827 test.End();
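    // The layout transition recorded in cbb is itself a write to image_a, so it must be ordered against cba's read of
    // image_a (the copy source) or syncval will flag it.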
4828
4829 // We're going to do the copy first, then use the skip on fail, to test three different ways...
4830 test.Submit0Signal(test.cba);
4831
4832 // First asynchronously fail -- the pipeline barrier in B shouldn't work on queue 1
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
4834 test.Submit1(test.cbb);
4835 m_errorMonitor->VerifyFound();
4836
4837 // Next synchronously fail -- the pipeline barrier in B shouldn't work on queue 1
4838 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004839 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004840 m_errorMonitor->VerifyFound();
4841
4842 // Then prove qso works (note that with the failure, the semaphore hasn't been waited, nor the layout changed)
John Zulaufc55f4702022-07-15 12:16:34 -06004843 test.Submit0Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004844
4845 m_device->wait();
John Zulauf46f5d6b2022-06-30 12:38:34 -06004846}
John Zulauf2f5947d2022-07-27 15:36:31 -06004847
4848TEST_F(VkSyncValTest, SyncQSRenderPass) {
4849 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4850 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4851 if (IsPlatform(kNexusPlayer)) {
4852 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
4853 return;
4854 }
4855
John Zulauf2f5947d2022-07-27 15:36:31 -06004856 CreateRenderPassHelper rp_helper(m_device);
4857 rp_helper.InitAllAttachmentsToLayoutGeneral();
4858
4859 rp_helper.InitState();
4860 rp_helper.InitAttachmentLayouts(); // Quiet any CoreChecks ImageLayout complaints
4861 m_device->wait(); // and quiesce the system
4862
4863 // The dependency protects the input attachment but not the color attachment
4864 rp_helper.subpass_dep.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
4865 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
4866 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, 0U});
4867
4868 rp_helper.InitRenderPass();
4869 rp_helper.InitFramebuffer();
4870 rp_helper.InitBeginInfo();
4871
4872 VkCommandBufferObj cb0(m_device, m_commandPool);
4873 VkCommandBufferObj cb1(m_device, m_commandPool);
4874
4875 auto do_begin_rp = [&rp_helper](VkCommandBufferObj& cb_obj) { cb_obj.BeginRenderPass(rp_helper.render_pass_begin); };
4876
4877 auto do_clear = [&rp_helper](VkCommandBufferObj& cb_obj) {
4878 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
4879 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_input->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4880 &full_subresource_range);
4881 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_color->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4882 &full_subresource_range);
4883 };
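    // Both attachments are written by the transfer clears above; the external subpass dependency only brings the
    // input attachment into its second scope, so BeginRenderPass's access to the color attachment still collides with
    // its clear, producing the WRITE_AFTER_WRITE below.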
4884
4885 // Single renderpass barrier (sanity check)
4886 cb0.begin();
4887 do_clear(cb0);
John Zulauf2f5947d2022-07-27 15:36:31 -06004888 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4889 do_begin_rp(cb0);
4890 m_errorMonitor->VerifyFound();
4891 // No "end render pass" as the begin fails
4892
John Zulauf2f5947d2022-07-27 15:36:31 -06004893 cb0.end();
4894 cb0.reset();
4895
4896 // Inter CB detection (dual cb), load is safe, clear errors at submit time
4897 cb0.begin();
4898 do_clear(cb0);
4899 cb0.end();
4900
4901 cb1.begin();
4902 do_begin_rp(cb1);
4903 cb1.EndRenderPass();
4904 cb1.end();
4905
4906 auto submit2 = lvl_init_struct<VkSubmitInfo>();
4907 VkCommandBuffer two_cbs[2] = {cb0.handle(), cb1.handle()};
4908 submit2.commandBufferCount = 2;
4909 submit2.pCommandBuffers = two_cbs;
John Zulauf2f5947d2022-07-27 15:36:31 -06004910 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4911 vk::QueueSubmit(m_device->m_queue, 1, &submit2, VK_NULL_HANDLE);
4912 m_errorMonitor->VerifyFound();
4913}