1/*
2 * Copyright (c) 2015-2022 The Khronos Group Inc.
3 * Copyright (c) 2015-2022 Valve Corporation
4 * Copyright (c) 2015-2022 LunarG, Inc.
5 * Copyright (c) 2015-2022 Google, Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Author: Chia-I Wu <olvaffe@gmail.com>
14 * Author: Chris Forbes <chrisf@ijw.co.nz>
15 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
16 * Author: Mark Lobodzinski <mark@lunarg.com>
17 * Author: Mike Stroyan <mike@LunarG.com>
18 * Author: Tobin Ehlis <tobine@google.com>
19 * Author: Tony Barbour <tony@LunarG.com>
20 * Author: Cody Northrop <cnorthrop@google.com>
21 * Author: Dave Houlton <daveh@lunarg.com>
22 * Author: Jeremy Kniager <jeremyk@lunarg.com>
23 * Author: Shannon McPherson <shannon@lunarg.com>
24 * Author: John Zulauf <jzulauf@lunarg.com>
25 */
26#include <type_traits>
27
28#include "cast_utils.h"
29#include "layer_validation_tests.h"
30
31TEST_F(VkSyncValTest, SyncBufferCopyHazards) {
32 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
33 if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_BUFFER_MARKER_EXTENSION_NAME)) {
34 m_device_extension_names.push_back(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
35 }
36 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
37 bool has_amd_buffer_marker = DeviceExtensionEnabled(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
38
39 VkBufferObj buffer_a;
40 VkBufferObj buffer_b;
41 VkBufferObj buffer_c;
42 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
43 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
44 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
45 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
46
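    // The full 256-byte region and the 128-byte half regions below let the copies that follow either collide (hazard) or stay disjoint.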
47 VkBufferCopy region = {0, 0, 256};
48 VkBufferCopy front2front = {0, 0, 128};
49 VkBufferCopy front2back = {0, 128, 128};
50 VkBufferCopy back2back = {128, 128, 128};
51
52 auto cb = m_commandBuffer->handle();
53 m_commandBuffer->begin();
54
55 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
56
57 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
58 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
59 m_errorMonitor->VerifyFound();
60
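    // The first copy (a->b) read buffer_a, so the write to buffer_a above was flagged as write-after-read.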
61 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
62 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
63 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
64 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
65 buffer_barrier.buffer = buffer_a.handle();
66 buffer_barrier.offset = 0;
67 buffer_barrier.size = 256;
68 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
69 nullptr);
70
71 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
72 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);
73
74 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
75 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
76 m_errorMonitor->VerifyFound();
77
78 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
79 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
80 m_errorMonitor->VerifyFound();
81
82 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
83 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
84
85 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
86 auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
87 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
88 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
89 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
90 nullptr);
91
92 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);
93
94 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
95 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; // Protect C but not B
96 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
97 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
98 nullptr);
99 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
100 m_errorMonitor->VerifyFound();
101
102 m_commandBuffer->end();
103
104 // CmdFillBuffer
105 m_commandBuffer->reset();
106 m_commandBuffer->begin();
107 vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
108 m_commandBuffer->end();
109
110 m_commandBuffer->reset();
111 m_commandBuffer->begin();
112 vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
113 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
114 vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
115 m_errorMonitor->VerifyFound();
116 m_commandBuffer->end();
117
118 // CmdUpdateBuffer
119 int i = 10;
120 m_commandBuffer->reset();
121 m_commandBuffer->begin();
122 vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
123 m_commandBuffer->end();
124
125 m_commandBuffer->reset();
126 m_commandBuffer->begin();
127 vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
128 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
129 vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
130 m_errorMonitor->VerifyFound();
131 m_commandBuffer->end();
132
133 // Create secondary buffers to use
134 VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
135 VkCommandBuffer scb1 = secondary_cb1.handle();
136 secondary_cb1.begin();
137 vk::CmdCopyBuffer(scb1, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
138 secondary_cb1.end();
139
140 VkCommandBufferObj secondary_cb2(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
141 VkCommandBuffer scb2 = secondary_cb2.handle();
142 secondary_cb2.begin();
143 vk::CmdCopyBuffer(scb2, buffer_a.handle(), buffer_c.handle(), 1, &front2front);
144 secondary_cb2.end();
145
146 VkCommandBufferObj secondary_cb3(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
147 VkCommandBuffer scb3 = secondary_cb3.handle();
148 secondary_cb3.begin();
149 secondary_cb3.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 0,
150 nullptr);
151 secondary_cb3.end();
152
153 VkCommandBufferObj secondary_cb4(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
154 VkCommandBuffer scb4 = secondary_cb4.handle();
155 secondary_cb4.begin();
156 vk::CmdCopyBuffer(scb4, buffer_b.handle(), buffer_c.handle(), 1, &front2front);
157 secondary_cb4.end();
158
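    // In summary: scb1 writes buffer_a (reads buffer_c), scb2 reads buffer_a (writes buffer_c), scb3 records only an execution barrier, and scb4 reads buffer_b (writes buffer_c); the executions below combine them to exercise hazard detection across secondary command buffer boundaries.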
159 // One secondary CB hazards with the active command buffer
160 m_commandBuffer->reset();
161 m_commandBuffer->begin();
162 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
163 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
164 vk::CmdExecuteCommands(cb, 1, &scb1);
165 m_errorMonitor->VerifyFound();
166 m_commandBuffer->end();
167
168 // Two secondary CBs hazard with each other
169 m_commandBuffer->reset();
170 m_commandBuffer->begin();
171 // There is also a "SYNC-HAZARD-WRITE_AFTER_WRITE" present, but only the first hazard is reported.
172 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
173 {
174 VkCommandBuffer two_cbs[2] = {scb1, scb2};
175 vk::CmdExecuteCommands(cb, 2, two_cbs);
176 }
177 m_errorMonitor->VerifyFound();
178 m_commandBuffer->end();
179
180 // Two secondary CBs hazard with each other
181 m_commandBuffer->reset();
182 m_commandBuffer->begin();
183 {
184 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
185 VkCommandBuffer two_cbs[2] = {scb1, scb4};
186 vk::CmdExecuteCommands(cb, 2, two_cbs);
187 m_errorMonitor->VerifyFound();
188 }
189 m_commandBuffer->end();
190
191 // Add a secondary CB with a barrier
192 m_commandBuffer->reset();
193 m_commandBuffer->begin();
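    // scb3 contains only a transfer->transfer execution barrier; executed between scb1 (reads buffer_c) and scb4 (writes buffer_c) it resolves the earlier write-after-read hazard, so no error is expected here.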
194 {
195 VkCommandBuffer three_cbs[3] = {scb1, scb3, scb4};
196 vk::CmdExecuteCommands(cb, 3, three_cbs);
197 }
198 m_commandBuffer->end();
199
200 m_commandBuffer->reset();
201 // CmdWriteBufferMarkerAMD
202 if (has_amd_buffer_marker) {
203 auto fpCmdWriteBufferMarkerAMD =
204 (PFN_vkCmdWriteBufferMarkerAMD)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteBufferMarkerAMD");
205 if (!fpCmdWriteBufferMarkerAMD) {
206 printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
207 } else {
208 m_commandBuffer->reset();
209 m_commandBuffer->begin();
210 fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
211 m_commandBuffer->end();
212
213 m_commandBuffer->reset();
214 m_commandBuffer->begin();
215 vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
216 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
217 fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
218 m_errorMonitor->VerifyFound();
219 m_commandBuffer->end();
220 }
221 } else {
222 printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
223 }
224}
225
226TEST_F(VkSyncValTest, Sync2BufferCopyHazards) {
227 SetTargetApiVersion(VK_API_VERSION_1_2);
228 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
229 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
230 m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
231 } else {
232 GTEST_SKIP() << "Synchronization2 not supported";
233 }
234
235 if (!CheckSynchronization2SupportAndInitState(this)) {
236 GTEST_SKIP() << "Synchronization2 not supported";
237 }
238 auto fpCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
239
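    // This test mirrors SyncBufferCopyHazards, but records the barriers with the synchronization2 structures (VkBufferMemoryBarrier2KHR / VkMemoryBarrier2KHR inside a VkDependencyInfoKHR) via vkCmdPipelineBarrier2KHR.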
240 VkBufferObj buffer_a;
241 VkBufferObj buffer_b;
242 VkBufferObj buffer_c;
243 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
244 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
245 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
246 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
247
248 VkBufferCopy region = {0, 0, 256};
249 VkBufferCopy front2front = {0, 0, 128};
250 VkBufferCopy front2back = {0, 128, 128};
251 VkBufferCopy back2back = {128, 128, 128};
252
253 auto cb = m_commandBuffer->handle();
254 m_commandBuffer->begin();
255
256 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
257
258 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
259 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
260 m_errorMonitor->VerifyFound();
261
262 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
263 {
264 auto buffer_barrier = lvl_init_struct<VkBufferMemoryBarrier2KHR>();
265 buffer_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
266 buffer_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
267 buffer_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
268 buffer_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
269 buffer_barrier.buffer = buffer_a.handle();
270 buffer_barrier.offset = 0;
271 buffer_barrier.size = 256;
272 auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
273 dep_info.bufferMemoryBarrierCount = 1;
274 dep_info.pBufferMemoryBarriers = &buffer_barrier;
275 fpCmdPipelineBarrier2KHR(cb, &dep_info);
276 }
277
278 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
279 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);
280
281 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
282 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
283 m_errorMonitor->VerifyFound();
284
285 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
286 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
287 m_errorMonitor->VerifyFound();
288
289 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
290 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
291
292 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
293 {
294 auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
295 mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
296 mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
297 mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
298 mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
299 auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
300 dep_info.memoryBarrierCount = 1;
301 dep_info.pMemoryBarriers = &mem_barrier;
302 fpCmdPipelineBarrier2KHR(cb, &dep_info);
303
304 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);
305
306 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
307 mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR; // Protect C but not B
308 mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
309 fpCmdPipelineBarrier2KHR(cb, &dep_info);
310 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
311 m_errorMonitor->VerifyFound();
312
313 m_commandBuffer->end();
314 }
315}
316
317TEST_F(VkSyncValTest, SyncCopyOptimalImageHazards) {
318 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
319 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
320
321 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
322 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
323 VkImageObj image_a(m_device);
324 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
325 image_a.Init(image_ci);
326 ASSERT_TRUE(image_a.initialized());
327
328 VkImageObj image_b(m_device);
329 image_b.Init(image_ci);
330 ASSERT_TRUE(image_b.initialized());
331
332 VkImageObj image_c(m_device);
333 image_c.Init(image_ci);
334 ASSERT_TRUE(image_c.initialized());
335
336 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
337 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
338 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
339 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
340 VkOffset3D zero_offset{0, 0, 0};
341 VkOffset3D half_offset{64, 64, 0};
342 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
343 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
344
345 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
346 VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
347 VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
348 VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
349 VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
350 VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};
351
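    // Each image has two array layers; the regions above select whole layers or 64x64 quadrants so hazards can be created (or avoided) per subresource and per extent.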
352 m_commandBuffer->begin();
353
354 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
355 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
356 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
357
358 auto cb = m_commandBuffer->handle();
359
360 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
361
362 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
363 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
364 m_errorMonitor->VerifyFound();
365
366 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
367 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
368 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
369 image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
370 image_barrier.image = image_a.handle();
371 image_barrier.subresourceRange = full_subresource_range;
372 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
373 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
374 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
375 &image_barrier);
376
377 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
378 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);
379
380 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
381 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
382 m_errorMonitor->VerifyFound();
383
384 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
385 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
386 m_errorMonitor->VerifyFound();
387
388 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
389 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
390
391 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
392 auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
393 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
394 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
395 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
396 nullptr);
397 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
398
399 // Use barrier to protect last reader, but not last writer...
400 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
401 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; // Protects C but not B
402 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
403 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
404 nullptr);
405 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
406 m_errorMonitor->VerifyFound();
407
408 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
409 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
410 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
411 m_errorMonitor->VerifyFound();
412
413 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);
414
415 m_commandBuffer->end();
416
417 // Test secondary command buffers
418 // Create secondary buffers to use
419 VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
420 VkCommandBuffer scb1 = secondary_cb1.handle();
421 secondary_cb1.begin();
422 vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
423 secondary_cb1.end();
424
425 auto record_primary = [&]() {
426 m_commandBuffer->reset();
427 m_commandBuffer->begin();
428 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
429 vk::CmdExecuteCommands(cb, 1, &scb1);
430 m_commandBuffer->end();
431 };
432
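    // record_primary() reads image_a (copy a->b) and then executes scb1, which writes image_a, so a write-after-read hazard is expected unless scb1 starts with a suitable barrier.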
433 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
434 record_primary();
435 m_errorMonitor->VerifyFound();
436
437 // With a barrier...
438 secondary_cb1.reset();
439 secondary_cb1.begin();
440 vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
441 nullptr);
442 vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
443 secondary_cb1.end();
444 record_primary();
445
446 auto image_transition_barrier = image_barrier;
447 image_transition_barrier.image = image_a.handle();
448 image_transition_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
449 image_transition_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
450
451 secondary_cb1.reset();
452 secondary_cb1.begin();
453 // Use the wrong stage, get an error
454 vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 1,
455 &image_transition_barrier);
456 secondary_cb1.end();
457
458 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
459 record_primary();
460 m_errorMonitor->VerifyFound();
461
462 // CmdResolveImage hazard testing
463 VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
464 vk::GetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D,
465 VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &formProps);
466
467 if (!(formProps.sampleCounts & VK_SAMPLE_COUNT_2_BIT)) {
468 printf("%s CmdResolveImage Test requires unsupported VK_SAMPLE_COUNT_2_BIT feature. Skipped.\n", kSkipPrefix);
469 } else {
470 VkImageObj image_s2_a(m_device), image_s2_b(m_device);
471 image_ci.samples = VK_SAMPLE_COUNT_2_BIT;
472 image_s2_a.Init(image_ci);
473 ASSERT_TRUE(image_s2_a.initialized());
474
475 image_s2_b.Init(image_ci);
476 ASSERT_TRUE(image_s2_b.initialized());
477
478 VkImageResolve r_full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
479
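        // A resolve reads the multisampled source and writes the single-sample destination, so it can hazard against earlier copies that touch either image.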
480 m_commandBuffer->reset();
481 m_commandBuffer->begin();
482 image_s2_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
483 image_s2_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
484 vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
485 &r_full_region);
486 m_commandBuffer->end();
487
488 m_commandBuffer->reset();
489 m_commandBuffer->begin();
490 vk::CmdCopyImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
491 &full_region);
492 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
493
494 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
495 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
496 vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
497 &r_full_region);
498 m_errorMonitor->VerifyFound();
499
500 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
501 vk::CmdResolveImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
502 &r_full_region);
503 m_errorMonitor->VerifyFound();
504 m_commandBuffer->end();
505 }
506}
507
508TEST_F(VkSyncValTest, Sync2CopyOptimalImageHazards) {
509 SetTargetApiVersion(VK_API_VERSION_1_2);
510 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
511 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
512 m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
513 } else {
514 GTEST_SKIP() << "Synchronization2 not supported";
515 }
516
517 if (!CheckSynchronization2SupportAndInitState(this)) {
518 GTEST_SKIP() << "Synchronization2 not supported";
519 }
520 auto fpCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
521
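    // Sync2 variant of SyncCopyOptimalImageHazards: the same hazards, with barriers expressed through VkDependencyInfoKHR and vkCmdPipelineBarrier2KHR.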
522 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
523 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
524 VkImageObj image_a(m_device);
525 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
526 image_a.Init(image_ci);
527 ASSERT_TRUE(image_a.initialized());
528
529 VkImageObj image_b(m_device);
530 image_b.Init(image_ci);
531 ASSERT_TRUE(image_b.initialized());
532
533 VkImageObj image_c(m_device);
534 image_c.Init(image_ci);
535 ASSERT_TRUE(image_c.initialized());
536
537 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
538 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
539 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
540 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
541 VkOffset3D zero_offset{0, 0, 0};
542 VkOffset3D half_offset{64, 64, 0};
543 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
544 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
545
546 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
547 VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
548 VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
549 VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
550 VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
551 VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};
552
553 m_commandBuffer->begin();
554
555 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
556 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
557 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
558
559 auto cb = m_commandBuffer->handle();
560
561 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
562
563 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
564 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
565 m_errorMonitor->VerifyFound();
566
567 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
568 {
569 auto image_barrier = lvl_init_struct<VkImageMemoryBarrier2KHR>();
570 image_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
571 image_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
572 image_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
573 image_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
574 image_barrier.image = image_a.handle();
575 image_barrier.subresourceRange = full_subresource_range;
576 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
577 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
578 auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
579 dep_info.imageMemoryBarrierCount = 1;
580 dep_info.pImageMemoryBarriers = &image_barrier;
581 fpCmdPipelineBarrier2KHR(cb, &dep_info);
582 }
583
584 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
585 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);
586
587 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
588 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
589 m_errorMonitor->VerifyFound();
590
591 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
592 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
593 m_errorMonitor->VerifyFound();
594
595 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
596 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
597
598 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
599 {
600 auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
601 mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
602 mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
603 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
604 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
605 auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
606 dep_info.memoryBarrierCount = 1;
607 dep_info.pMemoryBarriers = &mem_barrier;
608 fpCmdPipelineBarrier2KHR(cb, &dep_info);
609 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
610
611 // Use barrier to protect last reader, but not last writer...
612 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
613 mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR; // Protects C but not B
614 mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
615 fpCmdPipelineBarrier2KHR(cb, &dep_info);
616 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
617 m_errorMonitor->VerifyFound();
618 }
619
620 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
621 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
622 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
623 m_errorMonitor->VerifyFound();
624
625 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);
626
627 m_commandBuffer->end();
628}
629
630TEST_F(VkSyncValTest, SyncCopyOptimalMultiPlanarHazards) {
631 // TODO: Add code to enable sync validation
632 // Enable KHR multiplane req'd extensions
633 bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
634 VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
635 if (mp_extensions) {
636 m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
637 }
638 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
639 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
640 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
641 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
642 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
643 if (mp_extensions) {
644 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
645 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
646 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
647 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
648 } else {
649 printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
650 return;
651 }
652
653 ASSERT_NO_FATAL_FAILURE(InitState());
654
655 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
656 VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
657 VkImageObj image_a(m_device);
658 const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
659 // Verify format
660 bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
661 VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
662 if (!supported) {
663 printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
664 return; // Assume there's low ROI on searching for different mp formats
665 }
666
667 image_a.Init(image_ci);
668 VkImageObj image_b(m_device);
669 image_b.Init(image_ci);
670 VkImageObj image_c(m_device);
671 image_c.Init(image_ci);
672
673 VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 2};
674 VkImageSubresourceLayers layer0_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
675 VkImageSubresourceLayers layer0_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
676 VkImageSubresourceLayers layer1_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 1, 1};
677 VkImageSubresourceRange full_subresource_range{
678 VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 2};
679 VkOffset3D zero_offset{0, 0, 0};
680 VkOffset3D one_four_offset{32, 32, 0};
681 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
682 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
683 VkExtent3D one_four_extent{32, 32, 1}; // <-- image type is 2D
684
685 VkImageCopy region_all_plane0_to_all_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
686 VkImageCopy region_layer0_plane0_to_layer0_plane0 = {layer0_plane0, zero_offset, layer0_plane0, zero_offset, full_extent};
687 VkImageCopy region_layer0_plane0_to_layer0_plane1 = {layer0_plane0, zero_offset, layer0_plane1, zero_offset, half_extent};
688 VkImageCopy region_layer1_plane1_to_layer1_plane1_front = {layer1_plane1, zero_offset, layer1_plane1, zero_offset,
689 one_four_extent};
690 VkImageCopy region_layer1_plane1_to_layer1_plane1_back = {layer1_plane1, one_four_offset, layer1_plane1, one_four_offset,
691 one_four_extent};
692
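    // Plane 0 and plane 1 of the 3-plane format are distinct subresources, so copies that write different planes of image_a do not hazard with each other, while re-copying the same plane region does.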
693 m_commandBuffer->begin();
694
695 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
696 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
697 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
698
699 auto cb = m_commandBuffer->handle();
700
701 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
702 &region_all_plane0_to_all_plane0);
703
704 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
705 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
706 &region_all_plane0_to_all_plane0);
707 m_errorMonitor->VerifyFound();
708
709 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
710 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
711 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
712 image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
713 image_barrier.image = image_a.handle();
714 image_barrier.subresourceRange = full_subresource_range;
715 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
716 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
717 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
718 &image_barrier);
719
720 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
721 &region_layer0_plane0_to_layer0_plane0);
722 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
723 &region_layer0_plane0_to_layer0_plane1);
724
725 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
726 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
727 &region_layer0_plane0_to_layer0_plane1);
728 m_errorMonitor->VerifyFound();
729
730 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
731 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
732 &region_all_plane0_to_all_plane0);
733 m_errorMonitor->VerifyFound();
734
735 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
736 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
737
738 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
739 auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
740 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
741 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
742 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
743 nullptr);
744 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
745 &region_all_plane0_to_all_plane0);
746
747 // Use barrier to protect last reader, but not last writer...
748 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
749 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; // Protects C but not B
750 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
751 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
752 nullptr);
753 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
754 &region_all_plane0_to_all_plane0);
755 m_errorMonitor->VerifyFound();
756
757 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
758 &region_layer1_plane1_to_layer1_plane1_front);
759 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
760 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
761 &region_layer1_plane1_to_layer1_plane1_front);
762 m_errorMonitor->VerifyFound();
763
764 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
765 &region_layer1_plane1_to_layer1_plane1_back);
766
767 m_commandBuffer->end();
768}
769
770TEST_F(VkSyncValTest, SyncCopyLinearImageHazards) {
771 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
772 ASSERT_NO_FATAL_FAILURE(InitState());
773
774 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
775 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
776 VkImageObj image_a(m_device);
777 const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
778 image_a.Init(image_ci);
779 VkImageObj image_b(m_device);
780 image_b.Init(image_ci);
781 VkImageObj image_c(m_device);
782 image_c.Init(image_ci);
783
784 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
785 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
786 VkOffset3D zero_offset{0, 0, 0};
787 VkOffset3D half_offset{64, 64, 0};
788 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
789 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
790
791 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
792 VkImageCopy region_front = {layers_all, zero_offset, layers_all, zero_offset, half_extent};
793 VkImageCopy region_back = {layers_all, half_offset, layers_all, half_offset, half_extent};
794
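    // The front and back half regions do not overlap, so only repeating a copy into the same half (or using the full region) should hazard.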
795 m_commandBuffer->begin();
796
797 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
798 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
799 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
800
801 auto cb = m_commandBuffer->handle();
802
803 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
804
805 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
806 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
807 m_errorMonitor->VerifyFound();
808
809 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
810 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
811 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
812 image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
813 image_barrier.image = image_b.handle();
814 image_barrier.subresourceRange = full_subresource_range;
815 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
816 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
817 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
818 &image_barrier);
819
820 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
821
822 // Use barrier to protect last reader, but not last writer...
823 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
824 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; // Protects C but not B
825 image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
826 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
827 &image_barrier);
828 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
829 m_errorMonitor->VerifyFound();
830
831 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
832 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
833 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
834 m_errorMonitor->VerifyFound();
835
836 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_back);
837}
838
839TEST_F(VkSyncValTest, SyncCopyLinearMultiPlanarHazards) {
840 // TODO: Add code to enable sync validation
841 // Enable KHR multiplane req'd extensions
842 bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
843 VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
844 if (mp_extensions) {
845 m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
846 }
847 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
848 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
849 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
850 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
851 mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
852 if (mp_extensions) {
853 m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
854 m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
855 m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
856 m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
857 } else {
858 printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
859 return;
860 }
861
862 ASSERT_NO_FATAL_FAILURE(InitState());
863
864 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
865 VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
866 VkImageObj image_a(m_device);
867 const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
868 // Verify format
869 bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
870 VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
871 if (!supported) {
872 printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
873 return; // Assume there's low ROI on searching for different mp formats
874 }
875
876 image_a.Init(image_ci);
877 VkImageObj image_b(m_device);
878 image_b.Init(image_ci);
879 VkImageObj image_c(m_device);
880 image_c.Init(image_ci);
881
882 VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
883 VkImageSubresourceLayers layer_all_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
884 VkImageSubresourceRange full_subresource_range{
885 VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 1};
886 VkOffset3D zero_offset{0, 0, 0};
887 VkOffset3D one_four_offset{32, 32, 0};
888 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
889 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
890 VkExtent3D one_four_extent{32, 32, 1}; // <-- image type is 2D
891
892 VkImageCopy region_plane0_to_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
893 VkImageCopy region_plane0_to_plane1 = {layer_all_plane0, zero_offset, layer_all_plane1, zero_offset, half_extent};
894 VkImageCopy region_plane1_to_plane1_front = {layer_all_plane1, zero_offset, layer_all_plane1, zero_offset, one_four_extent};
895 VkImageCopy region_plane1_to_plane1_back = {layer_all_plane1, one_four_offset, layer_all_plane1, one_four_offset,
896 one_four_extent};
897
898 m_commandBuffer->begin();
899
900 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
901 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
902 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
903
904 auto cb = m_commandBuffer->handle();
905
906 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
907 &region_plane0_to_plane0);
908
909 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
910 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
911 &region_plane0_to_plane0);
912 m_errorMonitor->VerifyFound();
913
914 // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
915 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
916 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
917 image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
918 image_barrier.image = image_a.handle();
919 image_barrier.subresourceRange = full_subresource_range;
920 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
921 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
922 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
923 &image_barrier);
924
925 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
926 &region_plane0_to_plane0);
927 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
928 &region_plane0_to_plane1);
929
930 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
931 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
932 &region_plane0_to_plane1);
933 m_errorMonitor->VerifyFound();
934
935 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
936 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
937 &region_plane0_to_plane0);
938 m_errorMonitor->VerifyFound();
939
940 // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
941 // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.
942
943 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
944 auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
945 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
946 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
947 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
948 nullptr);
949 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
950 &region_plane0_to_plane0);
951
952 // Use barrier to protect last reader, but not last writer...
953 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
954 mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT; // Protects C but not B
955 mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
956 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
957 nullptr);
958 vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
959 &region_plane0_to_plane0);
960 m_errorMonitor->VerifyFound();
961
962 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
963 &region_plane1_to_plane1_front);
964 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
965 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
966 &region_plane1_to_plane1_front);
967 m_errorMonitor->VerifyFound();
968
Jeremy Gebben170781d2020-11-19 16:21:21 -0700969 vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
970 &region_plane1_to_plane1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -0700971
972 m_commandBuffer->end();
973}
974
975TEST_F(VkSyncValTest, SyncCopyBufferImageHazards) {
976 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
977 ASSERT_NO_FATAL_FAILURE(InitState());
978
979 VkBufferObj buffer_a, buffer_b;
980 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
981 buffer_a.init_as_src_and_dst(*m_device, 2048, mem_prop);
982 buffer_b.init_as_src_and_dst(*m_device, 2048, mem_prop);
983
984 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
985 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
986 VkImageObj image_a(m_device), image_b(m_device);
987 const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
988 image_a.Init(image_ci);
989 image_b.Init(image_ci);
990
991 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
992 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
993 VkOffset3D zero_offset{0, 0, 0};
994 VkOffset3D half_offset{16, 16, 0};
995 VkExtent3D half_extent{16, 16, 1}; // <-- image type is 2D
996
997 VkBufferImageCopy region_buffer_front_image_0_front = {0, 16, 16, layers_0, zero_offset, half_extent};
998 VkBufferImageCopy region_buffer_front_image_1_front = {0, 16, 16, layers_1, zero_offset, half_extent};
999 VkBufferImageCopy region_buffer_front_image_1_back = {0, 16, 16, layers_1, half_offset, half_extent};
1000 VkBufferImageCopy region_buffer_back_image_0_front = {1024, 16, 16, layers_0, zero_offset, half_extent};
1001 VkBufferImageCopy region_buffer_back_image_0_back = {1024, 16, 16, layers_0, half_offset, half_extent};
1002 VkBufferImageCopy region_buffer_back_image_1_front = {1024, 16, 16, layers_1, zero_offset, half_extent};
1003 VkBufferImageCopy region_buffer_back_image_1_back = {1024, 16, 16, layers_1, half_offset, half_extent};
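    // Each region copies a 16x16 quadrant (1024 bytes): buffer "front" is bytes [0, 1024) and "back" is [1024, 2048);
    // image "front" is the upper-left quadrant and "back" the lower-right quadrant of array layer 0 or 1.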
1004
1005 m_commandBuffer->begin();
1006 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1007 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1008
1009 auto cb = m_commandBuffer->handle();
1010 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1011 &region_buffer_front_image_0_front);
1012
1013 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1014 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1015 &region_buffer_front_image_0_front);
1016 m_errorMonitor->VerifyFound();
1017
1018 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1019 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1020 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1021 &region_buffer_front_image_0_front);
1022 m_errorMonitor->VerifyFound();
1023
1024 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1025 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1026 &region_buffer_back_image_0_front);
1027 m_errorMonitor->VerifyFound();
1028
1029 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1030 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1031 &region_buffer_front_image_1_front);
1032 m_errorMonitor->VerifyFound();
1033
1034 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1035 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1036 &region_buffer_front_image_1_back);
1037 m_errorMonitor->VerifyFound();
1038
Jeremy Gebben170781d2020-11-19 16:21:21 -07001039 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001040
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001041 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001042 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1043 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1044 buffer_barrier.buffer = buffer_a.handle();
1045 buffer_barrier.offset = 1024;
1046 buffer_barrier.size = 2048;
1047 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1048 nullptr);
1049
Jeremy Gebben170781d2020-11-19 16:21:21 -07001050 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1051 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001052
1053 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1054 nullptr);
1055
Jeremy Gebben170781d2020-11-19 16:21:21 -07001056 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001057
1058 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1059 &region_buffer_front_image_0_front);
1060
1061 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1062 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1063 &region_buffer_front_image_0_front);
1064 m_errorMonitor->VerifyFound();
1065
1066 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1067 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1068 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1069 &region_buffer_front_image_0_front);
1070 m_errorMonitor->VerifyFound();
1071
1072 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1073 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1074 &region_buffer_back_image_0_front);
1075 m_errorMonitor->VerifyFound();
1076
1077 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1078 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1079 &region_buffer_front_image_1_front);
1080 m_errorMonitor->VerifyFound();
1081
1082 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1083 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1084 &region_buffer_front_image_1_back);
1085 m_errorMonitor->VerifyFound();
1086
Jeremy Gebben170781d2020-11-19 16:21:21 -07001087 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001088
1089 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1090 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1091 buffer_barrier.buffer = buffer_b.handle();
1092 buffer_barrier.offset = 1024;
1093 buffer_barrier.size = 2048;
1094 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1095 nullptr);
1096
Jeremy Gebben170781d2020-11-19 16:21:21 -07001097 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1098 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001099
1100 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1101 nullptr);
1102
Jeremy Gebben170781d2020-11-19 16:21:21 -07001103 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001104
1105 m_commandBuffer->end();
1106}
1107
1108TEST_F(VkSyncValTest, SyncBlitImageHazards) {
1109 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1110 ASSERT_NO_FATAL_FAILURE(InitState());
1111
1112 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1113 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1114 VkImageObj image_a(m_device), image_b(m_device);
1115 const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
1116 image_a.Init(image_ci);
1117 image_b.Init(image_ci);
1118
1119 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1120 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
1121 VkOffset3D zero_offset{0, 0, 0};
1122 VkOffset3D half_0_offset{16, 16, 0};
1123 VkOffset3D half_1_offset{16, 16, 1};
1124 VkOffset3D full_offset{32, 32, 1};
1125 VkImageBlit region_0_front_1_front = {layers_0, {zero_offset, half_1_offset}, layers_1, {zero_offset, half_1_offset}};
1126 VkImageBlit region_1_front_0_front = {layers_1, {zero_offset, half_1_offset}, layers_0, {zero_offset, half_1_offset}};
1127 VkImageBlit region_1_back_0_back = {layers_1, {half_0_offset, full_offset}, layers_0, {half_0_offset, full_offset}};
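    // The "front" regions are the upper-left 16x16 quadrant and "back" the lower-right; region_0_front_1_front blits layer 0's
    // front onto layer 1's front, while the other two regions blit from layer 1 back to layer 0.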
1128
1129 m_commandBuffer->begin();
1130 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1131 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1132
1133 auto cb = m_commandBuffer->handle();
1134
1135 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1136 &region_0_front_1_front, VK_FILTER_NEAREST);
1137
1138 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1139 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1140 &region_0_front_1_front, VK_FILTER_NEAREST);
1141 m_errorMonitor->VerifyFound();
1142
1143 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1144 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1145 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1146 &region_1_front_0_front, VK_FILTER_NEAREST);
1147 m_errorMonitor->VerifyFound();
1148
Jeremy Gebben170781d2020-11-19 16:21:21 -07001149 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1150 &region_1_back_0_back, VK_FILTER_NEAREST);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001151
1152 m_commandBuffer->end();
1153}
1154
1155TEST_F(VkSyncValTest, SyncRenderPassBeginTransitionHazard) {
1156 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1157 ASSERT_NO_FATAL_FAILURE(InitState());
John Zulaufbb373682021-10-05 17:21:40 -06001158 const VkSubpassDependency external_subpass_dependency = {VK_SUBPASS_EXTERNAL,
1159 0,
1160 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1161 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1162 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1163 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1164 VK_DEPENDENCY_BY_REGION_BIT};
1165 m_additionalSubpassDependencies.push_back(external_subpass_dependency);
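    // Note: the explicit COLOR_ATTACHMENT_OUTPUT -> COLOR_ATTACHMENT_OUTPUT external dependency appears to give later
    // pipeline barriers targeting that stage something to chain with, so the render pass begin layout transition can be
    // ordered against earlier transfer accesses (an interpretation of the test's intent, not a spec statement).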
Jeremy Gebben170781d2020-11-19 16:21:21 -07001166 ASSERT_NO_FATAL_FAILURE(InitRenderTarget(2));
1167
1168 // Render Target Information
1169 auto width = static_cast<uint32_t>(m_width);
1170 auto height = static_cast<uint32_t>(m_height);
1171 auto *rt_0 = m_renderTargets[0].get();
1172 auto *rt_1 = m_renderTargets[1].get();
1173
1174 // Other buffers with which to interact
1175 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1176 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1177 VkImageObj image_a(m_device), image_b(m_device);
1178 const auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1179 image_a.Init(image_ci);
1180 image_b.Init(image_ci);
1181
1182 VkOffset3D zero_offset{0, 0, 0};
1183 VkExtent3D full_extent{width, height, 1}; // <-- image type is 2D
1184 VkImageSubresourceLayers layer_color{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1185 VkImageCopy region_to_copy = {layer_color, zero_offset, layer_color, zero_offset, full_extent};
1186
1187 auto cb = m_commandBuffer->handle();
1188
Jeremy Gebben170781d2020-11-19 16:21:21 -07001189 m_commandBuffer->begin();
1190 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1191 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1192 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1193 rt_1->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1194
1195 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1196 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, rt_0->handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001197
1198 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1199 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // This fails, so the driver call is skipped and no matching EndRenderPass is needed
1200 m_errorMonitor->VerifyFound();
1201
Jeremy Gebben170781d2020-11-19 16:21:21 -07001202 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
1203 VkImageSubresourceRange rt_full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001204 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001205 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
John Zulaufbb373682021-10-05 17:21:40 -06001206 image_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001207 image_barrier.image = rt_0->handle();
1208 image_barrier.subresourceRange = rt_full_subresource_range;
1209 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
1210 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
John Zulaufbb373682021-10-05 17:21:40 -06001211 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0,
1212 nullptr, 1, &image_barrier);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001213 vk::CmdCopyImage(cb, rt_1->handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001214
1215 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1216 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo); // This fails, so the driver call is skipped and no matching EndRenderPass is needed
1217 m_errorMonitor->VerifyFound();
1218
Jeremy Gebben170781d2020-11-19 16:21:21 -07001219 // A global execution barrier that the implicit external dependency can chain with should work...
1220 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 0,
1221 nullptr);
1222
1223 // With the barrier above, the layout transition has a chained execution sync operation, and the default
1224 // implicit VkSubpassDependency makes the load op clear safe with respect to the layout transition...
1225 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1226 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001227}
1228
1229TEST_F(VkSyncValTest, SyncCmdDispatchDrawHazards) {
1230 // TODO: Add code to enable sync validation
1231 SetTargetApiVersion(VK_API_VERSION_1_2);
1232
1233 // Enable VK_KHR_draw_indirect_count for KHR variants
1234 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001235 VkPhysicalDeviceVulkan12Features features12 = LvlInitStruct<VkPhysicalDeviceVulkan12Features>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001236 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
1237 m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1238 if (DeviceValidationVersion() >= VK_API_VERSION_1_2) {
1239 features12.drawIndirectCount = VK_TRUE;
1240 }
1241 }
1242 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features12, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1243 bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1244 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
1245
1246 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
1247 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1248 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1249 VkImageObj image_c_a(m_device), image_c_b(m_device);
1250 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
1251 image_c_a.Init(image_c_ci);
1252 image_c_b.Init(image_c_ci);
1253
1254 VkImageView imageview_c = image_c_a.targetView(format);
1255 VkImageUsageFlags image_usage_storage =
1256 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1257 VkImageObj image_s_a(m_device), image_s_b(m_device);
1258 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
1259 image_s_a.Init(image_s_ci);
1260 image_s_b.Init(image_s_ci);
1261 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1262 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1263
1264 VkImageView imageview_s = image_s_a.targetView(format);
1265
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001266 vk_testing::Sampler sampler_s, sampler_c;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001267 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001268 sampler_s.init(*m_device, sampler_ci);
1269 sampler_c.init(*m_device, sampler_ci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001270
1271 VkBufferObj buffer_a, buffer_b;
1272 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1273 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
1274 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1275 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
1276 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
1277
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001278 vk_testing::BufferView bufferview;
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001279 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001280 bvci.buffer = buffer_a.handle();
1281 bvci.format = VK_FORMAT_R32_SFLOAT;
1282 bvci.offset = 0;
1283 bvci.range = VK_WHOLE_SIZE;
1284
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001285 bufferview.init(*m_device, bvci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001286
1287 OneOffDescriptorSet descriptor_set(m_device,
1288 {
1289 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1290 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
1291 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
1292 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1293 });
1294
sfricke-samsung36428462021-02-10 01:23:34 -08001295 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001296 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
Jeremy Gebben170781d2020-11-19 16:21:21 -07001297 VK_IMAGE_LAYOUT_GENERAL);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001298 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_IMAGE_LAYOUT_GENERAL);
1299 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001300 descriptor_set.UpdateDescriptorSets();
1301
1302 // Dispatch
sjfricke394227a2022-06-20 16:47:38 +09001303 const char *csSource = R"glsl(
sfricke-samsung1c0b96a2021-07-08 22:24:09 -07001304 #version 450
1305 layout(set=0, binding=0) uniform foo { float x; } ub0;
1306 layout(set=0, binding=1) uniform sampler2D cis1;
1307 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
1308 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
1309 void main(){
1310 vec4 vColor4;
1311 vColor4.x = ub0.x;
1312 vColor4 = texture(cis1, vec2(0));
1313 vColor4 = imageLoad(si2, ivec2(0));
1314 vColor4 = imageLoad(stb3, 0);
1315 }
1316 )glsl";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001317
John Zulaufbe8562b2020-12-15 14:21:01 -07001318 VkEventObj event;
1319 event.init(*m_device, VkEventObj::create_info(0));
1320 VkEvent event_handle = event.handle();
1321
Jeremy Gebben170781d2020-11-19 16:21:21 -07001322 CreateComputePipelineHelper pipe(*this);
1323 pipe.InitInfo();
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001324 pipe.cs_.reset(new VkShaderObj(this, csSource, VK_SHADER_STAGE_COMPUTE_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001325 pipe.InitState();
1326 pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1327 pipe.CreateComputePipeline();
1328
1329 m_commandBuffer->begin();
1330
1331 VkBufferCopy buffer_region = {0, 0, 2048};
1332 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1333
1334 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1335 VkOffset3D zero_offset{0, 0, 0};
1336 VkExtent3D full_extent{16, 16, 1};
1337 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
1338 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1339 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1340 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
1341 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1342
1343 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1344 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1345 &descriptor_set.set_, 0, nullptr);
1346
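    // The dispatch reads all four descriptors (uniform buffer, combined image sampler, storage image, storage texel buffer),
    // each backed by a resource the transfer commands above just wrote, so four READ_AFTER_WRITE hazards are expected.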
1347 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1348 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1349 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1350 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1351 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1352 m_errorMonitor->VerifyFound();
1353
1354 m_commandBuffer->end();
1355 m_commandBuffer->reset();
1356 m_commandBuffer->begin();
1357
1358 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1359 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1360 &descriptor_set.set_, 0, nullptr);
1361 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1362
1363 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1364 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1365 m_errorMonitor->VerifyFound();
1366
1367 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1368 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1369 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1370 m_errorMonitor->VerifyFound();
1371
1372 m_commandBuffer->end();
1373 m_commandBuffer->reset();
1374
1375 // DispatchIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001376 VkBufferObj buffer_dispatchIndirect, buffer_dispatchIndirect2;
1377 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1378 buffer_dispatchIndirect.init(
1379 *m_device, buffer_dispatchIndirect.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1380 buffer_dispatchIndirect2.init(
1381 *m_device, buffer_dispatchIndirect2.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1382 m_commandBuffer->begin();
1383 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1384 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1385 &descriptor_set.set_, 0, nullptr);
1386 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1387 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001388
1389 m_commandBuffer->reset();
1390 m_commandBuffer->begin();
1391
1392 buffer_region = {0, 0, sizeof(VkDispatchIndirectCommand)};
1393 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_dispatchIndirect2.handle(), buffer_dispatchIndirect.handle(), 1,
1394 &buffer_region);
1395 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1396 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1397 &descriptor_set.set_, 0, nullptr);
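    // The indirect parameter buffer was just written by the copy above, so reading it for the dispatch is a hazard.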
1398 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1399 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1400 m_errorMonitor->VerifyFound();
1401 m_commandBuffer->end();
1402
1403 // Draw
Jeremy Gebben170781d2020-11-19 16:21:21 -07001404 const float vbo_data[3] = {1.f, 0.f, 1.f};
1405 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
1406 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
1407 VkBufferObj vbo, vbo2;
1408 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1409 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1410 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1411
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001412 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
1413 VkShaderObj fs(this, csSource, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001414
1415 CreatePipelineHelper g_pipe(*this);
1416 g_pipe.InitInfo();
1417 g_pipe.InitState();
1418 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
1419 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
1420 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
1421 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
1422 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
1423 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1424 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
1425
1426 m_commandBuffer->reset();
1427 m_commandBuffer->begin();
1428 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1429 VkDeviceSize offset = 0;
1430 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1431
1432 VkViewport viewport = {0, 0, 16, 16, 0, 1};
1433 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1434 VkRect2D scissor = {{0, 0}, {16, 16}};
1435 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1436
1437 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1438 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1439 &descriptor_set.set_, 0, nullptr);
1440 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1441 m_commandBuffer->EndRenderPass();
1442 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001443
1444 m_commandBuffer->reset();
1445 m_commandBuffer->begin();
1446
1447 buffer_region = {0, 0, sizeof(vbo_data)};
1448 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1449
1450 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1451 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1452 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1453 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1454 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1455 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1456 &descriptor_set.set_, 0, nullptr);
1457
1458 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1459 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1460 m_errorMonitor->VerifyFound();
1461
1462 m_commandBuffer->EndRenderPass();
1463 m_commandBuffer->end();
1464
John Zulaufbe8562b2020-12-15 14:21:01 -07001465 // Repeat the draw test with a WaitEvent to protect it.
John Zulaufbe8562b2020-12-15 14:21:01 -07001466 m_commandBuffer->reset();
1467 m_commandBuffer->begin();
1468
1469 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1470
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001471 auto vbo_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulaufbe8562b2020-12-15 14:21:01 -07001472 vbo_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1473 vbo_barrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
1474 vbo_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1475 vbo_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1476 vbo_barrier.buffer = vbo.handle();
1477 vbo_barrier.offset = buffer_region.dstOffset;
1478 vbo_barrier.size = buffer_region.size;
1479
1480 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
1481
1482 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1483 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1484 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1485 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1486 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1487 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1488 &descriptor_set.set_, 0, nullptr);
1489
1490 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, nullptr, 1,
1491 &vbo_barrier, 0, nullptr);
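    // The event was set after the vbo copy, and the wait plus buffer barrier order the transfer write before the
    // VERTEX_INPUT stage read, so this draw is expected to pass without a hazard.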
1492 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1493
1494 m_commandBuffer->EndRenderPass();
1495 m_commandBuffer->end();
John Zulaufbe8562b2020-12-15 14:21:01 -07001496
Jeremy Gebben170781d2020-11-19 16:21:21 -07001497 // DrawIndexed
Jeremy Gebben170781d2020-11-19 16:21:21 -07001498 const float ibo_data[3] = {0.f, 0.f, 0.f};
1499 VkBufferObj ibo, ibo2;
1500 buffer_usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1501 ibo.init(*m_device, ibo.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1502 ibo2.init(*m_device, ibo2.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1503
1504 m_commandBuffer->reset();
1505 m_commandBuffer->begin();
1506 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1507 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1508 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1509 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1510 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1511
1512 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1513 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1514 &descriptor_set.set_, 0, nullptr);
1515 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1516 m_commandBuffer->EndRenderPass();
1517 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001518
1519 m_commandBuffer->reset();
1520 m_commandBuffer->begin();
1521
1522 buffer_region = {0, 0, sizeof(ibo_data)};
1523 vk::CmdCopyBuffer(m_commandBuffer->handle(), ibo2.handle(), ibo.handle(), 1, &buffer_region);
1524
1525 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1526 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1527 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1528 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1529 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1530 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1531 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1532 &descriptor_set.set_, 0, nullptr);
1533
1534 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1535 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1536 m_errorMonitor->VerifyFound();
1537
1538 m_commandBuffer->EndRenderPass();
1539 m_commandBuffer->end();
1540
1541 // DrawIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001542 VkBufferObj buffer_drawIndirect, buffer_drawIndirect2;
1543 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1544 buffer_drawIndirect.init(*m_device, buffer_drawIndirect.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1545 mem_prop);
1546 buffer_drawIndirect2.init(*m_device, buffer_drawIndirect2.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1547 mem_prop);
1548
1549 m_commandBuffer->reset();
1550 m_commandBuffer->begin();
1551 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1552 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1553 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1554 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1555
1556 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1557 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1558 &descriptor_set.set_, 0, nullptr);
1559 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1560 m_commandBuffer->EndRenderPass();
1561 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001562
1563 m_commandBuffer->reset();
1564 m_commandBuffer->begin();
1565
1566 buffer_region = {0, 0, sizeof(VkDrawIndirectCommand)};
1567 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndirect2.handle(), buffer_drawIndirect.handle(), 1, &buffer_region);
1568
1569 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1570 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1571 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1572 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1573 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1574 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1575 &descriptor_set.set_, 0, nullptr);
1576
1577 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1578 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1579 m_errorMonitor->VerifyFound();
1580
1581 m_commandBuffer->EndRenderPass();
1582 m_commandBuffer->end();
1583
1584 // DrawIndexedIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001585 VkBufferObj buffer_drawIndexedIndirect, buffer_drawIndexedIndirect2;
1586 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1587 buffer_drawIndexedIndirect.init(
1588 *m_device, buffer_drawIndexedIndirect.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1589 buffer_drawIndexedIndirect2.init(
1590 *m_device, buffer_drawIndexedIndirect2.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1591
1592 m_commandBuffer->reset();
1593 m_commandBuffer->begin();
1594 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1595 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1596 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1597 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1598 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1599
1600 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1601 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1602 &descriptor_set.set_, 0, nullptr);
1603 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1604 m_commandBuffer->EndRenderPass();
1605 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001606
1607 m_commandBuffer->reset();
1608 m_commandBuffer->begin();
1609
1610 buffer_region = {0, 0, sizeof(VkDrawIndexedIndirectCommand)};
1611 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndexedIndirect2.handle(), buffer_drawIndexedIndirect.handle(), 1,
1612 &buffer_region);
1613
1614 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1615 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1616 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1617 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1618 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1619 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1620 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1621 &descriptor_set.set_, 0, nullptr);
1622
1623 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1624 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, 1,
1625 sizeof(VkDrawIndexedIndirectCommand));
1626 m_errorMonitor->VerifyFound();
1627
1628 m_commandBuffer->EndRenderPass();
1629 m_commandBuffer->end();
1630
1631 if (has_khr_indirect) {
1632 // DrawIndirectCount
1633 auto fpCmdDrawIndirectCountKHR =
1634 (PFN_vkCmdDrawIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
1635 if (!fpCmdDrawIndirectCountKHR) {
1636 printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1637 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001638 VkBufferObj buffer_count, buffer_count2;
1639 buffer_usage =
1640 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1641 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1642 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1643
1644 m_commandBuffer->reset();
1645 m_commandBuffer->begin();
1646 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1647 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1648 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1649 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1650
1651 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1652 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1653 0, 1, &descriptor_set.set_, 0, nullptr);
1654 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1655 sizeof(VkDrawIndirectCommand));
1656 m_commandBuffer->EndRenderPass();
1657 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001658
1659 m_commandBuffer->reset();
1660 m_commandBuffer->begin();
1661
1662 buffer_region = {0, 0, sizeof(uint32_t)};
1663 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1664
1665 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1666 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1667 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1668 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1669 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1670 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1671 0, 1, &descriptor_set.set_, 0, nullptr);
1672
1673 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1674 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1675 sizeof(VkDrawIndirectCommand));
1676 m_errorMonitor->VerifyFound();
1677
1678 m_commandBuffer->EndRenderPass();
1679 m_commandBuffer->end();
1680 }
1681
1682 // DrawIndexedIndirectCount
1683 auto fpCmdDrawIndexIndirectCountKHR =
1684 (PFN_vkCmdDrawIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
1685 if (!fpCmdDrawIndexIndirectCountKHR) {
1686 printf("%s Test requires unsupported vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1687 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001688 VkBufferObj buffer_count, buffer_count2;
1689 buffer_usage =
1690 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1691 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1692 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1693
1694 m_commandBuffer->reset();
1695 m_commandBuffer->begin();
1696 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1697 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1698 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1699 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1700 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1701
1702 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1703 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1704 0, 1, &descriptor_set.set_, 0, nullptr);
1705 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1706 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1707 m_commandBuffer->EndRenderPass();
1708 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001709
1710 m_commandBuffer->reset();
1711 m_commandBuffer->begin();
1712
1713 buffer_region = {0, 0, sizeof(uint32_t)};
1714 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1715
1716 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1717 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1718 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1719 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1720 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1721 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1722 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1723 0, 1, &descriptor_set.set_, 0, nullptr);
1724
1725 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1726 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1727 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1728 m_errorMonitor->VerifyFound();
1729
1730 m_commandBuffer->EndRenderPass();
1731 m_commandBuffer->end();
1732 }
1733 } else {
1734 printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR & vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n",
1735 kSkipPrefix);
1736 }
1737}
1738
1739TEST_F(VkSyncValTest, SyncCmdClear) {
1740 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1741 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1742 // CmdClearColorImage
Jeremy Gebben170781d2020-11-19 16:21:21 -07001743 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1744 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1745 VkImageObj image_a(m_device), image_b(m_device);
1746 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1747 image_a.Init(image_ci);
1748 image_b.Init(image_ci);
1749
1750 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1751 VkOffset3D zero_offset{0, 0, 0};
1752 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
1753 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
1754
1755 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
1756
1757 m_commandBuffer->begin();
1758
1759 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1760 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1761
1762 auto cb = m_commandBuffer->handle();
1763 VkClearColorValue ccv = {};
1764 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1765 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001766
1767 m_commandBuffer->reset();
1768 m_commandBuffer->begin();
1769 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
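    // The copy above reads image_a and writes image_b, so clearing image_a is a WAR hazard and clearing image_b a WAW hazard.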
1770
1771 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1772 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1773 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1774 vk::CmdClearColorImage(m_commandBuffer->handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1775 m_errorMonitor->VerifyFound();
1776
1777 m_commandBuffer->end();
1778
1779 // CmdClearDepthStencilImage
1780 format = FindSupportedDepthStencilFormat(gpu());
1781 if (!format) {
1782 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
1783 return;
1784 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07001785 VkImageObj image_ds_a(m_device), image_ds_b(m_device);
1786 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1787 image_ds_a.Init(image_ci);
1788 image_ds_b.Init(image_ci);
1789
1790 const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
1791 image_ds_a.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1792 image_ds_b.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1793
1794 m_commandBuffer->begin();
1795 const VkClearDepthStencilValue clear_value = {};
1796 VkImageSubresourceRange ds_range = {ds_aspect, 0, 1, 0, 1};
1797
1798 vk::CmdClearDepthStencilImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &ds_range);
1799 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001800
1801 VkImageSubresourceLayers ds_layers_all{ds_aspect, 0, 0, 1};
1802 VkImageCopy ds_full_region = {ds_layers_all, zero_offset, ds_layers_all, zero_offset, full_extent};
1803
1804 m_commandBuffer->reset();
1805 m_commandBuffer->begin();
1806 vk::CmdCopyImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1807 &ds_full_region);
1808
1809 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1810 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1811 &ds_range);
1812 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1813 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1814 &ds_range);
1815 m_errorMonitor->VerifyFound();
1816
1817 m_commandBuffer->end();
1818}
1819
1820TEST_F(VkSyncValTest, SyncCmdQuery) {
1821 // CmdCopyQueryPoolResults
Jeremy Gebben170781d2020-11-19 16:21:21 -07001822 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1823 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1824 if (IsPlatform(kNexusPlayer)) {
1825 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
1826 return;
1827 }
1828 if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
1829 printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
1830 return;
1831 }
1832 uint32_t queue_count;
1833 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
Jeremy Gebbend2573fc2021-05-12 17:17:38 -06001834 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
1835 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001836 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits == 0) {
1837 printf("%s Device graphic queue has timestampValidBits of 0, skipping.\n", kSkipPrefix);
1838 return;
1839 }
1840
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001841 vk_testing::QueryPool query_pool;
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001842 VkQueryPoolCreateInfo query_pool_create_info = LvlInitStruct<VkQueryPoolCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001843 query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
1844 query_pool_create_info.queryCount = 1;
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001845 query_pool.init(*m_device, query_pool_create_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001846
1847 VkBufferObj buffer_a, buffer_b;
1848 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1849 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
1850 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
1851
1852 VkBufferCopy region = {0, 0, 256};
1853
1854 auto cb = m_commandBuffer->handle();
1855 m_commandBuffer->begin();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001856 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1857 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
1858 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001859 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001860
1861 m_commandBuffer->reset();
1862 m_commandBuffer->begin();
1863 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001864 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1865 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001866 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001867 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001868 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001869 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_b.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001870 m_commandBuffer->end();
1871 m_errorMonitor->VerifyFound();
1872
1873 // TODO: Track VkQueryPool
1874 // TODO: CmdWriteTimestamp
Jeremy Gebben170781d2020-11-19 16:21:21 -07001875}
1876
1877TEST_F(VkSyncValTest, SyncCmdDrawDepthStencil) {
1878 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1879 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001880
1881 const auto format_ds = FindSupportedDepthStencilFormat(gpu());
1882 if (!format_ds) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001883 GTEST_SKIP() << "No Depth + Stencil format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001884 }
1885 const auto format_dp = FindSupportedDepthOnlyFormat(gpu());
1886 if (!format_dp) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001887 GTEST_SKIP() << "No depth-only format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001888 }
1889 const auto format_st = FindSupportedStencilOnlyFormat(gpu());
1890 if (!format_st) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001891 GTEST_SKIP() << "No stencil-only format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001892 }
1893
1894 VkDepthStencilObj image_ds(m_device), image_dp(m_device), image_st(m_device);
1895 image_ds.Init(m_device, 16, 16, format_ds);
1896 image_dp.Init(m_device, 16, 16, format_dp);
1897 image_st.Init(m_device, 16, 16, format_st);
1898
1899 VkRenderpassObj rp_ds(m_device, format_ds, true), rp_dp(m_device, format_dp, true), rp_st(m_device, format_st, true);
1900
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001901 vk_testing::Framebuffer fb_ds, fb_dp, fb_st;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001902 VkFramebufferCreateInfo fbci = {
1903 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_ds.handle(), 1, image_ds.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001904 fb_ds.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001905 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_dp.handle(), 1, image_dp.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001906 fb_dp.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001907 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_st.handle(), 1, image_st.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001908 fb_st.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001909
1910 VkStencilOpState stencil = {};
1911 stencil.failOp = VK_STENCIL_OP_KEEP;
1912 stencil.passOp = VK_STENCIL_OP_KEEP;
1913 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
1914 stencil.compareOp = VK_COMPARE_OP_NEVER;
1915
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001916 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001917 ds_ci.depthTestEnable = VK_TRUE;
1918 ds_ci.depthWriteEnable = VK_TRUE;
1919 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
1920 ds_ci.stencilTestEnable = VK_TRUE;
1921 ds_ci.front = stencil;
1922 ds_ci.back = stencil;
1923
1924 CreatePipelineHelper g_pipe_ds(*this), g_pipe_dp(*this), g_pipe_st(*this);
1925 g_pipe_ds.InitInfo();
1926 g_pipe_ds.gp_ci_.renderPass = rp_ds.handle();
1927 g_pipe_ds.gp_ci_.pDepthStencilState = &ds_ci;
1928 g_pipe_ds.InitState();
1929 ASSERT_VK_SUCCESS(g_pipe_ds.CreateGraphicsPipeline());
1930 g_pipe_dp.InitInfo();
1931 g_pipe_dp.gp_ci_.renderPass = rp_dp.handle();
1932 ds_ci.stencilTestEnable = VK_FALSE;
1933 g_pipe_dp.gp_ci_.pDepthStencilState = &ds_ci;
1934 g_pipe_dp.InitState();
1935 ASSERT_VK_SUCCESS(g_pipe_dp.CreateGraphicsPipeline());
1936 g_pipe_st.InitInfo();
1937 g_pipe_st.gp_ci_.renderPass = rp_st.handle();
1938 ds_ci.depthTestEnable = VK_FALSE;
1939 ds_ci.stencilTestEnable = VK_TRUE;
1940 g_pipe_st.gp_ci_.pDepthStencilState = &ds_ci;
1941 g_pipe_st.InitState();
1942 ASSERT_VK_SUCCESS(g_pipe_st.CreateGraphicsPipeline());
1943
1944 m_commandBuffer->begin();
1945 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
1946 m_renderPassBeginInfo.pClearValues = nullptr;
1947 m_renderPassBeginInfo.clearValueCount = 0;
1948
1949 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001950 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001951 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1952 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_ds.pipeline_);
1953 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1954 m_commandBuffer->EndRenderPass();
1955
1956 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001957 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001958 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1959 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_dp.pipeline_);
1960 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1961 m_commandBuffer->EndRenderPass();
1962
1963 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001964 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001965 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1966 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_st.pipeline_);
1967 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1968 m_commandBuffer->EndRenderPass();
1969
1970 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001971
1972 m_commandBuffer->reset();
1973 m_commandBuffer->begin();
1974
1975 VkImageCopy copyRegion;
1976 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1977 copyRegion.srcSubresource.mipLevel = 0;
1978 copyRegion.srcSubresource.baseArrayLayer = 0;
1979 copyRegion.srcSubresource.layerCount = 1;
1980 copyRegion.srcOffset = {0, 0, 0};
1981 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1982 copyRegion.dstSubresource.mipLevel = 0;
1983 copyRegion.dstSubresource.baseArrayLayer = 0;
1984 copyRegion.dstSubresource.layerCount = 1;
1985 copyRegion.dstOffset = {0, 0, 0};
1986 copyRegion.extent = {16, 16, 1};
1987
1988 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, image_dp.handle(),
1989 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, &copyRegion);
1990
1991 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1992 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1993 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, image_st.handle(),
1994 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, &copyRegion);
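    // The copies above read image_ds and write image_dp and image_st with no barrier afterwards, so beginning render
    // passes that use those images as attachments should report WRITE_AFTER_READ (vs. the image_ds read) and
    // WRITE_AFTER_WRITE (vs. the image_dp/image_st writes) hazards.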
1995 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001996 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001997 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1998 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1999 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002000
2001 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002002 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002003 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2004 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2005 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002006
2007 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002008 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002009 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2010 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2011 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002012}
2013
John Zulaufd57a36b2021-08-16 10:34:44 -06002014
Jeremy Gebben170781d2020-11-19 16:21:21 -07002015TEST_F(VkSyncValTest, RenderPassLoadHazardVsInitialLayout) {
2016 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
John Zulaufd57a36b2021-08-16 10:34:44 -06002017 bool do_none_load_op_test = false;
2018 if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME)) {
2019 m_device_extension_names.push_back(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME);
2020 do_none_load_op_test = true;
2021 }
2022
Jeremy Gebben170781d2020-11-19 16:21:21 -07002023 ASSERT_NO_FATAL_FAILURE(InitState());
2024 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2025
2026 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2027 VkImageUsageFlags usage_input = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2028 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2029 VkImageObj image_color(m_device), image_input(m_device);
2030 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2031 image_color.Init(image_ci);
2032 image_ci.usage = usage_input;
2033 image_input.Init(image_ci);
2034 VkImageView attachments[] = {image_color.targetView(format), image_input.targetView(format)};
2035
John Zulaufd57a36b2021-08-16 10:34:44 -06002036 VkAttachmentDescription attachmentDescriptions[] = {
Jeremy Gebben170781d2020-11-19 16:21:21 -07002037 // Result attachment
2038 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2039 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2040 VK_IMAGE_LAYOUT_UNDEFINED, // Deliberately wrong: leaving this as UNDEFINED triggers the desired SYNC-HAZARD errors in BeginRenderPass.
2041 // It should be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.
2042 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
2043 // Input attachment
2044 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD,
2045 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2046 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL}};
2047
2048 const VkAttachmentReference resultAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2049 const VkAttachmentReference inputAttachmentRef = {1u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2050
2051 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2052 VK_PIPELINE_BIND_POINT_GRAPHICS,
2053 1u,
2054 &inputAttachmentRef,
2055 1u,
2056 &resultAttachmentRef,
2057 0,
2058 0,
2059 0u,
2060 0};
2061
2062 const VkSubpassDependency subpassDependency = {VK_SUBPASS_EXTERNAL,
2063 0,
2064 VK_PIPELINE_STAGE_TRANSFER_BIT,
2065 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
2066 VK_ACCESS_TRANSFER_WRITE_BIT,
2067 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT,
2068 VK_DEPENDENCY_BY_REGION_BIT};
2069
2070 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2071 0,
2072 (VkRenderPassCreateFlags)0,
2073 2u,
2074 attachmentDescriptions,
2075 1u,
2076 &subpassDescription,
2077 1u,
2078 &subpassDependency};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002079 vk_testing::RenderPass rp;
2080 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002081
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002082 vk_testing::Framebuffer fb;
2083 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp.handle(), 2, attachments, 32, 32, 1};
2084 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002085
2086 image_input.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
2087
2088 m_commandBuffer->begin();
2089
2090 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002091 m_renderPassBeginInfo.renderPass = rp.handle();
2092 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002093
2094 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
2095 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2096 // Even though there are no prior accesses, the layout transition *is* an access, so the load op can be validated against the layout transition
2097 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2098 m_errorMonitor->VerifyFound();
John Zulaufd57a36b2021-08-16 10:34:44 -06002099
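    // Positive sub-test: with VK_EXT_load_store_op_none, LOAD_OP_NONE_EXT/STORE_OP_NONE_EXT perform no load/store
    // accesses on the attachments, so this render pass begin/end is not expected to trigger any synchronization hazards.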
2100 vk_testing::RenderPass rp_no_load_store;
2101 if (do_none_load_op_test) {
John Zulaufd57a36b2021-08-16 10:34:44 -06002102 attachmentDescriptions[0].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2103 attachmentDescriptions[0].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
2104 attachmentDescriptions[1].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2105 attachmentDescriptions[1].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
2106 rp_no_load_store.init(*m_device, renderPassInfo);
2107 m_renderPassBeginInfo.renderPass = rp_no_load_store.handle();
2108 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2109 m_commandBuffer->EndRenderPass();
John Zulaufd57a36b2021-08-16 10:34:44 -06002110 } else {
2111 printf("%s VK_EXT_load_store_op_none not supported, skipping sub-test\n", kSkipPrefix);
2112 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07002113}
2114
2115TEST_F(VkSyncValTest, SyncRenderPassWithWrongDepthStencilInitialLayout) {
2116 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2117 ASSERT_NO_FATAL_FAILURE(InitState());
2118 if (IsPlatform(kNexusPlayer)) {
2119 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2120 return;
2121 }
2122
2123 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2124
2125 VkFormat color_format = VK_FORMAT_R8G8B8A8_UNORM;
2126 VkFormat ds_format = FindSupportedDepthStencilFormat(gpu());
2127 if (!ds_format) {
2128 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
2129 return;
2130 }
2131 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2132 VkImageUsageFlags usage_ds = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2133 VkImageObj image_color(m_device), image_color2(m_device);
2134 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, color_format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2135 image_color.Init(image_ci);
2136 image_color2.Init(image_ci);
2137 VkDepthStencilObj image_ds(m_device);
2138 image_ds.Init(m_device, 32, 32, ds_format, usage_ds);
2139
2140 const VkAttachmentDescription colorAttachmentDescription = {(VkAttachmentDescriptionFlags)0,
2141 color_format,
2142 VK_SAMPLE_COUNT_1_BIT,
2143 VK_ATTACHMENT_LOAD_OP_CLEAR,
2144 VK_ATTACHMENT_STORE_OP_STORE,
2145 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2146 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2147 VK_IMAGE_LAYOUT_UNDEFINED,
2148 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2149
2150 const VkAttachmentDescription depthStencilAttachmentDescription = {
2151 (VkAttachmentDescriptionFlags)0, ds_format, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2152 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_STORE,
2153 VK_IMAGE_LAYOUT_UNDEFINED, // Deliberately wrong: leaving this as UNDEFINED causes the desired SYNC-HAZARD-WRITE_AFTER_WRITE error in BeginRenderPass.
2154 // It should be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL.
2155 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2156
2157 std::vector<VkAttachmentDescription> attachmentDescriptions;
2158 attachmentDescriptions.push_back(colorAttachmentDescription);
2159 attachmentDescriptions.push_back(depthStencilAttachmentDescription);
2160
2161 const VkAttachmentReference colorAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2162
2163 const VkAttachmentReference depthStencilAttachmentRef = {1u, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2164
2165 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2166 VK_PIPELINE_BIND_POINT_GRAPHICS,
2167 0u,
2168 0,
2169 1u,
2170 &colorAttachmentRef,
2171 0,
2172 &depthStencilAttachmentRef,
2173 0u,
2174 0};
2175
2176 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2177 0,
2178 (VkRenderPassCreateFlags)0,
2179 (uint32_t)attachmentDescriptions.size(),
2180 &attachmentDescriptions[0],
2181 1u,
2182 &subpassDescription,
2183 0u,
2184 0};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002185 vk_testing::RenderPass rp;
2186 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002187
2188 VkImageView fb_attachments[] = {image_color.targetView(color_format),
2189 image_ds.targetView(ds_format, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)};
2190 const VkFramebufferCreateInfo fbci = {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002191 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, 0, 0u, rp.handle(), 2u, fb_attachments, 32, 32, 1u,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002192 };
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002193 vk_testing::Framebuffer fb;
2194 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002195 fb_attachments[0] = image_color2.targetView(color_format);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002196 vk_testing::Framebuffer fb1;
2197 fb1.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002198
2199 CreatePipelineHelper g_pipe(*this);
2200 g_pipe.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002201 g_pipe.gp_ci_.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002202
2203 VkStencilOpState stencil = {};
2204 stencil.failOp = VK_STENCIL_OP_KEEP;
2205 stencil.passOp = VK_STENCIL_OP_KEEP;
2206 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
2207 stencil.compareOp = VK_COMPARE_OP_NEVER;
2208
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07002209 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002210 ds_ci.depthTestEnable = VK_TRUE;
2211 ds_ci.depthWriteEnable = VK_TRUE;
2212 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
2213 ds_ci.stencilTestEnable = VK_TRUE;
2214 ds_ci.front = stencil;
2215 ds_ci.back = stencil;
2216
2217 g_pipe.gp_ci_.pDepthStencilState = &ds_ci;
2218 g_pipe.InitState();
2219 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2220
2221 m_commandBuffer->begin();
Tony-LunarG73f37032021-06-07 11:47:03 -06002222 VkClearValue clear = {};
2223 std::array<VkClearValue, 2> clear_values = { {clear, clear} };
2224 m_renderPassBeginInfo.pClearValues = clear_values.data();
2225 m_renderPassBeginInfo.clearValueCount = clear_values.size();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002226 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002227 m_renderPassBeginInfo.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002228
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002229 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002230 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2231 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2232 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2233 m_commandBuffer->EndRenderPass();
2234
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002235 m_renderPassBeginInfo.framebuffer = fb1.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002236
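    // fb1 shares the depth/stencil attachment with fb. Its attachment description uses initialLayout UNDEFINED, so the
    // implicit layout transition at BeginRenderPass writes the attachment again without any dependency on the first
    // render pass's stores, producing the WRITE_AFTER_WRITE hazard below.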
2237 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2238 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2239 m_errorMonitor->VerifyFound();
2240}
2241
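// Test helper that gathers the boilerplate for building a render pass with one color and one input attachment:
// images and views, attachment descriptions and references, subpass descriptions and dependencies, the render pass,
// the framebuffer, and the VkRenderPassBeginInfo. The individual Init*() steps can be called separately when a test
// needs to customize the defaults instead of calling Init().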
John Zulauf01a49ee2022-07-13 11:37:08 -06002242struct CreateRenderPassHelper {
2243 struct SubpassDescriptionStore {
2244 const std::vector<VkAttachmentReference>& input_store;
2245 const std::vector<VkAttachmentReference>& color_store;
2246 VkSubpassDescription desc;
2247 SubpassDescriptionStore(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color)
2248 : input_store(input), color_store(color) {
2249 desc = {
2250 0u,
2251 VK_PIPELINE_BIND_POINT_GRAPHICS,
2252 static_cast<uint32_t>(input_store.size()),
2253 input_store.data(),
2254 static_cast<uint32_t>(color_store.size()),
2255 color_store.data(),
2256 nullptr,
2257 nullptr,
2258 0u,
2259 nullptr,
2260 };
2261 if (desc.inputAttachmentCount == 0) {
2262 desc.pInputAttachments = nullptr;
2263 }
2264 if (desc.colorAttachmentCount == 0) {
2265 desc.pColorAttachments = nullptr;
2266 }
2267 }
2268 };
2269
John Zulauf2f5947d2022-07-27 15:36:31 -06002270 VkImageUsageFlags usage_color =
2271 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
John Zulauf01a49ee2022-07-13 11:37:08 -06002272 VkImageUsageFlags usage_input =
2273 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2274 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2275 VkClearColorValue ccv = {};
2276
2277 VkDeviceObj* dev;
2278 const static uint32_t kDefaultImageSize = 64;
2279 uint32_t width = kDefaultImageSize;
2280 uint32_t height = kDefaultImageSize;
2281 std::shared_ptr<VkImageObj> image_color;
2282 std::shared_ptr<VkImageObj> image_input;
2283 VkImageView view_input = VK_NULL_HANDLE;
2284 VkImageView view_color = VK_NULL_HANDLE;
2285
2286 VkAttachmentReference color_ref;
2287 VkAttachmentReference input_ref;
2288 std::vector<VkImageView> attachments;
2289 VkAttachmentDescription fb_attach_desc;
2290 VkAttachmentDescription input_attach_desc;
2291 std::vector<VkAttachmentDescription> attachment_descs;
2292 std::vector<VkAttachmentReference> input_attachments;
2293 std::vector<VkAttachmentReference> color_attachments;
2294 std::vector<VkSubpassDependency> subpass_dep;
2295 std::vector<VkSubpassDescription> subpasses;
2296 std::vector<SubpassDescriptionStore> subpass_description_store;
2297 VkRenderPassCreateInfo render_pass_create_info;
John Zulauf2f5947d2022-07-27 15:36:31 -06002298 std::shared_ptr<vk_testing::RenderPass> render_pass;
John Zulauf01a49ee2022-07-13 11:37:08 -06002299 std::shared_ptr<vk_testing::Framebuffer> framebuffer;
2300 VkRenderPassBeginInfo render_pass_begin;
2301 std::vector<VkClearValue> clear_colors;
2302
2303 CreateRenderPassHelper(VkDeviceObj* dev_)
2304 : dev(dev_),
2305 image_color(std::make_shared<VkImageObj>(dev)),
2306 image_input(std::make_shared<VkImageObj>(dev)),
2307 color_ref(DefaultColorRef()),
2308 input_ref(DefaultInputRef()),
2309 fb_attach_desc(DefaultFbAttachDesc()),
2310 input_attach_desc(DefaultInputAttachDesc()) {}
2311
2312 CreateRenderPassHelper(const CreateRenderPassHelper& other) = default;
2313
2314 void InitImageAndView() {
2315 auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage_input, VK_IMAGE_TILING_OPTIMAL);
2316 image_input->InitNoLayout(image_ci);
2317 image_ci.usage = usage_color;
2318 image_color->InitNoLayout(image_ci);
2319
2320 view_input = image_input->targetView(format);
2321 view_color = image_color->targetView(format);
2322 attachments = {view_color, view_input};
2323 }
2324
John Zulauf2f5947d2022-07-27 15:36:31 -06002325 static VkAttachmentReference DefaultColorRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002326 return {
2327 0u,
2328 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2329 };
2330 }
2331
John Zulauf2f5947d2022-07-27 15:36:31 -06002332 static VkAttachmentReference DefaultInputRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002333 return {
2334 1u,
2335 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2336 };
2337 };
2338
John Zulauf2f5947d2022-07-27 15:36:31 -06002339 static VkAttachmentReference UnusedColorAttachmentRef() {
2340 return {
2341 VK_ATTACHMENT_UNUSED,
2342 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2343 };
2344 };
2345
John Zulauf01a49ee2022-07-13 11:37:08 -06002346 VkAttachmentDescription DefaultFbAttachDesc() {
2347 return VkAttachmentDescription{
2348 0u,
2349 format,
2350 VK_SAMPLE_COUNT_1_BIT,
2351 VK_ATTACHMENT_LOAD_OP_CLEAR,
2352 VK_ATTACHMENT_STORE_OP_STORE,
2353 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2354 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2355 VK_IMAGE_LAYOUT_UNDEFINED,
2356 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2357 };
2358 }
2359 VkAttachmentDescription DefaultInputAttachDesc() const {
2360 return VkAttachmentDescription{
2361 0u,
2362 format,
2363 VK_SAMPLE_COUNT_1_BIT,
2364 VK_ATTACHMENT_LOAD_OP_LOAD,
2365 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2366 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2367 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2368 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2369 VK_IMAGE_LAYOUT_GENERAL,
2370 };
2371 }
2372
John Zulauf2f5947d2022-07-27 15:36:31 -06002373 void InitAllAttachmentsToLayoutGeneral() {
2374 fb_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2375 fb_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2376 color_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2377 input_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2378 input_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2379 input_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2380 }
2381
2382 void SetAttachmentLayout(VkImageObj* image, const VkAttachmentDescription& attach_desc) {
2383 if (image && image->initialized() && (attach_desc.initialLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
2384 image->SetLayout(attach_desc.initialLayout);
2385 }
2386 }
2387
2388 void SetColorLayout() { SetAttachmentLayout(image_color.get(), fb_attach_desc); }
2389 void SetInputLayout() { SetAttachmentLayout(image_input.get(), input_attach_desc); }
2390
2391 void InitAttachmentLayouts() {
2392 SetColorLayout();
2393 SetInputLayout();
2394 }
2395
John Zulauf01a49ee2022-07-13 11:37:08 -06002396 void InitAttachmentArrays() {
2397 // Add attachments
2398 if (attachment_descs.empty()) {
2399 attachment_descs = {fb_attach_desc, input_attach_desc};
2400 }
2401 if (color_attachments.empty()) {
2402 color_attachments = {color_ref};
2403 }
2404 if (input_attachments.empty()) {
2405 input_attachments = {input_ref};
2406 }
2407 }
2408
John Zulauf2f5947d2022-07-27 15:36:31 -06002409 void AddSubpassDescription(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color) {
2410 subpass_description_store.emplace_back(input, color);
2411 }
2412
2413 // Capture the current input and color attachments, which can then be modified
2414 void AddSubpassDescription() { subpass_description_store.emplace_back(input_attachments, color_attachments); }
2415
John Zulauf01a49ee2022-07-13 11:37:08 -06002416 // This is the default for a single-subpass renderpass; don't call it if you want to change that
2417 void InitSubpassDescription() {
2418 if (subpass_description_store.empty()) {
John Zulauf2f5947d2022-07-27 15:36:31 -06002419 AddSubpassDescription();
John Zulauf01a49ee2022-07-13 11:37:08 -06002420 }
2421 }
2422
2423 void InitSubpasses() {
2424 if (subpasses.empty()) {
2425 subpasses.reserve(subpass_description_store.size());
2426 for (const auto& desc_store : subpass_description_store) {
2427 subpasses.emplace_back(desc_store.desc);
2428 }
2429 }
2430 }
2431
2432 void InitRenderPassInfo() {
2433 render_pass_create_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2434 nullptr,
2435 0u,
2436 static_cast<uint32_t>(attachment_descs.size()),
2437 attachment_descs.data(),
2438 static_cast<uint32_t>(subpasses.size()),
2439 subpasses.data(),
2440 static_cast<uint32_t>(subpass_dep.size()),
2441 subpass_dep.data()};
2442 }
2443
2444 void InitRenderPass() {
2445 InitAttachmentArrays();
2446 InitSubpassDescription();
2447 InitSubpasses();
2448 InitRenderPassInfo();
John Zulauf2f5947d2022-07-27 15:36:31 -06002449 render_pass = std::make_shared<vk_testing::RenderPass>();
2450 render_pass->init(*dev, render_pass_create_info);
John Zulauf01a49ee2022-07-13 11:37:08 -06002451 }
2452
2453 void InitFramebuffer() {
2454 framebuffer = std::make_shared<vk_testing::Framebuffer>();
2455 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
2456 0,
2457 0u,
John Zulauf2f5947d2022-07-27 15:36:31 -06002458 render_pass->handle(),
John Zulauf01a49ee2022-07-13 11:37:08 -06002459 static_cast<uint32_t>(attachments.size()),
2460 attachments.data(),
2461 width,
2462 height,
2463 1u};
2464 framebuffer->init(*dev, fbci);
2465 }
2466
2467 void InitState() {
2468 InitImageAndView();
2469 }
2470
2471 void InitBeginInfo() {
2472 render_pass_begin = lvl_init_struct<VkRenderPassBeginInfo>();
2473 render_pass_begin.renderArea = {{0, 0}, {width, height}};
John Zulauf2f5947d2022-07-27 15:36:31 -06002474 render_pass_begin.renderPass = render_pass->handle();
John Zulauf01a49ee2022-07-13 11:37:08 -06002475 render_pass_begin.framebuffer = framebuffer->handle();
2476
2477 // Simplistically ensure there are enough clear colors, if none were provided
2478 // TODO: Should eventually be smart enough to fill in color/depth values as appropriate
2479 VkClearValue fill_in;
2480 fill_in.color = ccv;
2481 for (size_t i = clear_colors.size(); i < attachments.size(); ++i) {
2482 clear_colors.push_back(fill_in);
2483 }
2484 render_pass_begin.clearValueCount = static_cast<uint32_t>(clear_colors.size());
2485 render_pass_begin.pClearValues = clear_colors.data();
2486 }
2487
John Zulauf2f5947d2022-07-27 15:36:31 -06002488 void InitPipelineHelper(CreatePipelineHelper& g_pipe) {
2489 g_pipe.InitInfo();
2490 g_pipe.ResetShaderInfo(bindStateVertShaderText, bindStateFragSubpassLoadInputText);
2491 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2492 g_pipe.gp_ci_.renderPass = render_pass->handle();
2493 g_pipe.InitState();
2494 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2495 }
2496
John Zulauf01a49ee2022-07-13 11:37:08 -06002497 void Init() {
2498 InitState();
2499 InitRenderPass();
2500 InitFramebuffer();
2501 InitBeginInfo();
2502 }
2503};
2504
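// Test helper that wraps CreatePipelineHelper with the standard vertex shader, a fragment shader that loads from an
// input attachment, a sampler, and a single input-attachment descriptor (binding 0) wired to view_input, targeting the
// supplied render pass.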
2505struct SyncTestPipeline {
2506 VkLayerTest& test;
2507 VkRenderPass rp;
2508 CreatePipelineHelper g_pipe;
2509 VkShaderObj vs;
2510 VkShaderObj fs;
2511 VkSamplerCreateInfo sampler_info;
2512 vk_testing::Sampler sampler;
2513 VkImageView view_input = VK_NULL_HANDLE;
2514 SyncTestPipeline(VkLayerTest& test_, VkRenderPass rp_)
2515 : test(test_),
2516 rp(rp_),
2517 g_pipe(test),
2518 vs(&test, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT),
2519 fs(&test, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT),
2520 sampler_info(SafeSaneSamplerCreateInfo()),
2521 sampler() {}
2522 void InitState() {
2523 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
2524 sampler.init(*test.DeviceObj(), sampler_info);
2525 g_pipe.InitInfo();
2526 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2527 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2528 g_pipe.gp_ci_.renderPass = rp;
2529 g_pipe.InitState();
2530 }
2531 void Init() {
2532 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2533 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, view_input, sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2534 g_pipe.descriptor_set_->UpdateDescriptorSets();
2535 }
2536};
2537
Jeremy Gebben170781d2020-11-19 16:21:21 -07002538TEST_F(VkSyncValTest, SyncLayoutTransition) {
2539 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2540 ASSERT_NO_FATAL_FAILURE(InitState());
2541 if (IsPlatform(kNexusPlayer)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002542 GTEST_SKIP() << "This test should not run on Nexus Player";
Jeremy Gebben170781d2020-11-19 16:21:21 -07002543 }
2544
John Zulauf01a49ee2022-07-13 11:37:08 -06002545 CreateRenderPassHelper rp_helper(m_device);
2546 rp_helper.Init();
2547 const VkImage image_input_handle = rp_helper.image_input->handle();
John Zulauf2f5947d2022-07-27 15:36:31 -06002548 const VkRenderPass rp = rp_helper.render_pass->handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002549
John Zulauf01a49ee2022-07-13 11:37:08 -06002550 SyncTestPipeline st_pipe(*this, rp);
2551 st_pipe.InitState();
2552 st_pipe.view_input = rp_helper.view_input;
2553 st_pipe.Init();
2554 const auto& g_pipe = st_pipe.g_pipe;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002555
2556 m_commandBuffer->begin();
2557 auto cb = m_commandBuffer->handle();
2558 VkClearColorValue ccv = {};
2559 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2560
2561 const VkImageMemoryBarrier preClearBarrier = {
2562 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0, 0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
John Zulauf01a49ee2022-07-13 11:37:08 -06002563 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, 0, image_input_handle, full_subresource_range,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002564 };
2565 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2566 &preClearBarrier);
2567
John Zulauf01a49ee2022-07-13 11:37:08 -06002568 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002569 &full_subresource_range);
2570
2571 const VkImageMemoryBarrier postClearBarrier = {
2572 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2573 0,
2574 VK_ACCESS_TRANSFER_WRITE_BIT,
John Zulauffa44ab22022-07-14 15:12:28 -06002575 VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002576 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2577 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2578 0,
2579 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002580 image_input_handle,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002581 full_subresource_range,
2582 };
John Zulauffa44ab22022-07-14 15:12:28 -06002583 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT,
2584 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0u, 0u, nullptr,
2585 0u, nullptr, 1u, &postClearBarrier);
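    // With the input image now in SHADER_READ_ONLY_OPTIMAL and the clear made visible to the fragment-shader and
    // color-attachment stages, the render pass recorded below is expected to be hazard-free.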
Jeremy Gebben170781d2020-11-19 16:21:21 -07002586
John Zulauf01a49ee2022-07-13 11:37:08 -06002587 m_commandBuffer->BeginRenderPass(rp_helper.render_pass_begin);
John Zulauffa44ab22022-07-14 15:12:28 -06002588 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2589 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2590 &g_pipe.descriptor_set_->set_, 0, nullptr);
2591
2592 // Positive test for ordering rules between load and input attachment usage
2593 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2594
2595 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2596 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002597
2598 // Catch a conflict with the input attachment final layout transition
2599 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002600 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002601 &full_subresource_range);
2602 m_errorMonitor->VerifyFound();
John Zulaufe972b752021-05-04 15:47:17 -06002603
2604 // There should be no hazard for an image layout transition (ILT) immediately following another ILT
John Zulaufe972b752021-05-04 15:47:17 -06002605 m_commandBuffer->end();
2606 m_commandBuffer->reset();
2607 m_commandBuffer->begin();
2608 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2609 &preClearBarrier);
2610 const VkImageMemoryBarrier wawBarrier = {
2611 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2612 0,
2613 VK_ACCESS_SHADER_READ_BIT,
2614 VK_ACCESS_SHADER_READ_BIT,
2615 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2616 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2617 0,
2618 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002619 image_input_handle,
John Zulaufe972b752021-05-04 15:47:17 -06002620 full_subresource_range,
2621 };
John Zulaufe972b752021-05-04 15:47:17 -06002622 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0u, 0u, nullptr, 0u,
2623 nullptr, 1u, &wawBarrier);
John Zulaufe972b752021-05-04 15:47:17 -06002624 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002625}
2626
2627TEST_F(VkSyncValTest, SyncSubpassMultiDep) {
2628 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2629 ASSERT_NO_FATAL_FAILURE(InitState());
2630 if (IsPlatform(kNexusPlayer)) {
2631 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2632 return;
2633 }
2634
John Zulauf01a49ee2022-07-13 11:37:08 -06002635 CreateRenderPassHelper rp_helper_positive(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002636
Jeremy Gebben170781d2020-11-19 16:21:21 -07002637 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2638 VkImageSubresourceLayers mip_0_layer_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
2639 VkOffset3D image_zero{0, 0, 0};
John Zulauf01a49ee2022-07-13 11:37:08 -06002640 VkExtent3D image_size{rp_helper_positive.width, rp_helper_positive.height, 1};
2641
Jeremy Gebben170781d2020-11-19 16:21:21 -07002642 VkImageCopy full_region{mip_0_layer_0, image_zero, mip_0_layer_0, image_zero, image_size};
2643
John Zulauf01a49ee2022-07-13 11:37:08 -06002644 rp_helper_positive.InitState();
John Zulauf2f5947d2022-07-27 15:36:31 -06002645 rp_helper_positive.InitAllAttachmentsToLayoutGeneral();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002646
John Zulauf01a49ee2022-07-13 11:37:08 -06002647 // Copy the common state to the other renderpass helper
2648 CreateRenderPassHelper rp_helper_negative(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002649
John Zulauf01a49ee2022-07-13 11:37:08 -06002650 auto& subpass_dep_positive = rp_helper_positive.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002651
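    // External <-> subpass 0 dependencies covering the transfer operations recorded around the render pass: the
    // pre-pass clear/copy writes are ordered before the attachment load and layout-transition accesses, and the
    // attachment writes are ordered before the post-pass transfer read and write.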
John Zulauf01a49ee2022-07-13 11:37:08 -06002652 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2653 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2654 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2655 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2656 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
2657 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
2658 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2659 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2660 VK_ACCESS_TRANSFER_READ_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
2661 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2662 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2663 VK_ACCESS_TRANSFER_WRITE_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
Jeremy Gebben170781d2020-11-19 16:21:21 -07002664
John Zulauf01a49ee2022-07-13 11:37:08 -06002665 rp_helper_positive.InitRenderPass();
2666 rp_helper_positive.InitFramebuffer();
2667 rp_helper_positive.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002668
John Zulauf01a49ee2022-07-13 11:37:08 -06002669 auto& subpass_dep_negative = rp_helper_negative.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002670 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2671 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2672 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2673 // Show that the two barriers do *not* chain by breaking the positive barrier into two bits.
2674 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2675 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, 0,
2676 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2677 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2678 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
2679 VK_DEPENDENCY_VIEW_LOCAL_BIT});
John Zulauf01a49ee2022-07-13 11:37:08 -06002680 rp_helper_negative.InitRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002681
John Zulauf01a49ee2022-07-13 11:37:08 -06002682 // The negative and positive render passes are compatible, so the framebuffer can be shared.
2683 rp_helper_negative.framebuffer = rp_helper_positive.framebuffer;
2684 rp_helper_negative.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002685
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002686 vk_testing::Sampler sampler;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002687 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002688 sampler.init(*m_device, sampler_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002689
Jeremy Gebben170781d2020-11-19 16:21:21 -07002690
2691 CreatePipelineHelper g_pipe(*this);
John Zulauf2f5947d2022-07-27 15:36:31 -06002692 rp_helper_positive.InitPipelineHelper(g_pipe);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002693
John Zulauf2f5947d2022-07-27 15:36:31 -06002694 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, rp_helper_positive.view_input, VK_NULL_HANDLE,
John Zulauf01a49ee2022-07-13 11:37:08 -06002695 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002696 g_pipe.descriptor_set_->UpdateDescriptorSets();
2697
2698 m_commandBuffer->begin();
2699 auto cb = m_commandBuffer->handle();
2700 VkClearColorValue ccv = {};
2701
2702 const VkImageMemoryBarrier xferDestBarrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2703 nullptr,
2704 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
2705 VK_ACCESS_TRANSFER_WRITE_BIT,
2706 VK_IMAGE_LAYOUT_GENERAL,
2707 VK_IMAGE_LAYOUT_GENERAL,
2708 VK_QUEUE_FAMILY_IGNORED,
2709 VK_QUEUE_FAMILY_IGNORED,
2710 VK_NULL_HANDLE,
2711 full_subresource_range};
2712 const VkImageMemoryBarrier xferDestToSrcBarrier = {
2713 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2714 nullptr,
2715 VK_ACCESS_TRANSFER_WRITE_BIT,
2716 VK_ACCESS_TRANSFER_READ_BIT,
2717 VK_IMAGE_LAYOUT_GENERAL,
2718 VK_IMAGE_LAYOUT_GENERAL,
2719 VK_QUEUE_FAMILY_IGNORED,
2720 VK_QUEUE_FAMILY_IGNORED,
2721 VK_NULL_HANDLE,
2722 full_subresource_range,
2723 };
2724
John Zulauf01a49ee2022-07-13 11:37:08 -06002725 const VkImage image_color = rp_helper_positive.image_color->handle();
2726 const VkImage image_input = rp_helper_positive.image_input->handle();
2727
Jeremy Gebben170781d2020-11-19 16:21:21 -07002728 VkImageMemoryBarrier preClearBarrier = xferDestBarrier;
John Zulauf01a49ee2022-07-13 11:37:08 -06002729 preClearBarrier.image = image_color;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002730
2731 VkImageMemoryBarrier preCopyBarriers[2] = {xferDestToSrcBarrier, xferDestBarrier};
John Zulauf01a49ee2022-07-13 11:37:08 -06002732 preCopyBarriers[0].image = image_color;
2733 preCopyBarriers[1].image = image_input;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002734 // Positive test for ordering rules between load and input attachment usage
Jeremy Gebben170781d2020-11-19 16:21:21 -07002735
2736 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2737 &preClearBarrier);
2738
John Zulauf01a49ee2022-07-13 11:37:08 -06002739 vk::CmdClearColorImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002740
2741 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 2u,
2742 preCopyBarriers);
2743
John Zulauf01a49ee2022-07-13 11:37:08 -06002744 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input, VK_IMAGE_LAYOUT_GENERAL, 1u,
2745 &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002746
2747 // No post-copy image barrier; we are testing the subpass dependencies
2748
Jeremy Gebben170781d2020-11-19 16:21:21 -07002749 // Positive renderpass multi-dependency test
John Zulauf01a49ee2022-07-13 11:37:08 -06002750 m_commandBuffer->BeginRenderPass(rp_helper_positive.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002751 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2752 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2753 &g_pipe.descriptor_set_->set_, 0, nullptr);
2754
2755 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2756
2757 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2758 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002759
John Zulauf01a49ee2022-07-13 11:37:08 -06002760 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002761 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002762
Jeremy Gebben170781d2020-11-19 16:21:21 -07002763 // Negative renderpass multi-dependency test: this will fail IFF the dependencies are acting independently.
2764 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-READ_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002765 m_commandBuffer->BeginRenderPass(rp_helper_negative.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002766 m_errorMonitor->VerifyFound();
2767}
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002768
2769TEST_F(VkSyncValTest, RenderPassAsyncHazard) {
2770 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2771 ASSERT_NO_FATAL_FAILURE(InitState());
2772
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002773 if (IsPlatform(kPixel3) || IsPlatform(kPixel3aXL)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002774 GTEST_SKIP() << "Temporarily disabling on Pixel 3 and Pixel 3a XL due to driver crash";
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002775 }
2776
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002777 // overall set up:
2778 // subpass 0:
2779 // write image 0
2780 // subpass 1:
2781 // read image 0
2782 // write image 1
2783 // subpass 2:
2784 // read image 0
2785 // write image 2
2786 // subpass 3:
2787 // read image 0
2788 // write image 3
2789 //
2790 // subpasses 1 & 2 can run in parallel but both should depend on 0
2791 // subpass 3 must run after 1 & 2 because otherwise the store operation will
2792 // race with the reads in the other subpasses.
2793
2794 constexpr VkFormat kFormat = VK_FORMAT_R8G8B8A8_UNORM;
2795 constexpr uint32_t kWidth = 32, kHeight = 32;
2796 constexpr uint32_t kNumImages = 4;
2797
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002798 VkImageCreateInfo src_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002799 src_img_info.flags = 0;
2800 src_img_info.imageType = VK_IMAGE_TYPE_2D;
2801 src_img_info.format = kFormat;
2802 src_img_info.extent = {kWidth, kHeight, 1};
2803 src_img_info.mipLevels = 1;
2804 src_img_info.arrayLayers = 1;
2805 src_img_info.samples = VK_SAMPLE_COUNT_2_BIT;
2806 src_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2807 src_img_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2808 src_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2809 src_img_info.queueFamilyIndexCount = 0;
2810 src_img_info.pQueueFamilyIndices = nullptr;
2811 src_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2812
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002813 VkImageCreateInfo dst_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002814 dst_img_info.flags = 0;
2815 dst_img_info.imageType = VK_IMAGE_TYPE_2D;
2816 dst_img_info.format = kFormat;
2817 dst_img_info.extent = {kWidth, kHeight, 1};
2818 dst_img_info.mipLevels = 1;
2819 dst_img_info.arrayLayers = 1;
2820 dst_img_info.samples = VK_SAMPLE_COUNT_1_BIT;
2821 dst_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2822 dst_img_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2823 dst_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2824 dst_img_info.queueFamilyIndexCount = 0;
2825 dst_img_info.pQueueFamilyIndices = nullptr;
2826 dst_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2827
2828 std::vector<std::unique_ptr<VkImageObj>> images;
2829 for (uint32_t i = 0; i < kNumImages; i++) {
2830 images.emplace_back(new VkImageObj(m_device));
2831 }
2832 images[0]->Init(src_img_info);
2833 for (uint32_t i = 1; i < images.size(); i++) {
2834 images[i]->Init(dst_img_info);
2835 }
2836
2837 std::array<VkImageView, kNumImages> attachments{};
2838 std::array<VkAttachmentDescription, kNumImages> attachment_descriptions{};
2839 std::array<VkAttachmentReference, kNumImages> color_refs{};
2840 std::array<VkImageMemoryBarrier, kNumImages> img_barriers{};
2841
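    // For each image: create its view, describe the attachment (CLEAR load, STORE store, initialLayout
    // COLOR_ATTACHMENT_OPTIMAL), and prepare an UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL barrier used to transition the
    // images before the render pass begins.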
2842 for (uint32_t i = 0; i < attachments.size(); i++) {
2843 attachments[i] = images[i]->targetView(kFormat);
2844 attachment_descriptions[i] = {};
2845 attachment_descriptions[i].flags = 0;
2846 attachment_descriptions[i].format = kFormat;
2847 attachment_descriptions[i].samples = VK_SAMPLE_COUNT_1_BIT;
2848 attachment_descriptions[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2849 attachment_descriptions[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
2850 attachment_descriptions[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2851 attachment_descriptions[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
2852 attachment_descriptions[i].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2853 attachment_descriptions[i].finalLayout =
2854 (i == 0) ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2855
2856 color_refs[i] = {i, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2857
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002858 img_barriers[i] = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002859 img_barriers[i].srcAccessMask = 0;
2860 img_barriers[i].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2861 img_barriers[i].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2862 img_barriers[i].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2863 img_barriers[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2864 img_barriers[i].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2865 img_barriers[i].image = images[i]->handle();
2866 img_barriers[i].subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
2867 }
2868
2869 const VkAttachmentReference input_ref{0u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2870
2871 std::array<std::array<uint32_t, 2>, kNumImages - 1> preserve_subpass{{{2, 3}, {1, 3}, {1, 2}}};
2872
2873 std::array<VkSubpassDescription, kNumImages> subpasses{};
2874
2875 subpasses[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2876 subpasses[0].inputAttachmentCount = 0;
2877 subpasses[0].pInputAttachments = nullptr;
2878 subpasses[0].colorAttachmentCount = 1;
2879 subpasses[0].pColorAttachments = &color_refs[0];
2880
2881 for (uint32_t i = 1; i < subpasses.size(); i++) {
2882 subpasses[i].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2883 subpasses[i].inputAttachmentCount = 1;
2884 subpasses[i].pInputAttachments = &input_ref;
2885 subpasses[i].colorAttachmentCount = 1;
2886 subpasses[i].pColorAttachments = &color_refs[i]; // each subpass writes its own color attachment
2887 subpasses[i].preserveAttachmentCount = preserve_subpass[i - 1].size();
2888 subpasses[i].pPreserveAttachments = preserve_subpass[i - 1].data();
2889 }
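    // Subpasses 1..3 each read attachment 0 as an input attachment, write their own color attachment, and preserve the
    // remaining attachments.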
2890
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002891 VkRenderPassCreateInfo renderpass_info = LvlInitStruct<VkRenderPassCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002892 renderpass_info.flags = 0;
2893 renderpass_info.attachmentCount = attachment_descriptions.size();
2894 renderpass_info.pAttachments = attachment_descriptions.data();
2895 renderpass_info.subpassCount = subpasses.size();
2896 renderpass_info.pSubpasses = subpasses.data();
2897 renderpass_info.dependencyCount = 0;
2898 renderpass_info.pDependencies = nullptr;
2899
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002900 VkFramebufferCreateInfo fbci = LvlInitStruct<VkFramebufferCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002901 fbci.flags = 0;
2902 fbci.attachmentCount = attachments.size();
2903 fbci.pAttachments = attachments.data();
2904 fbci.width = kWidth;
2905 fbci.height = kHeight;
2906 fbci.layers = 1;
2907
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002908 vk_testing::Sampler sampler;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002909 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002910 sampler.init(*m_device, sampler_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002911
sfricke-samsungae54c1e2022-01-21 05:35:21 -08002912 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
John Zulauf01a49ee2022-07-13 11:37:08 -06002913 VkShaderObj fs(this, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002914
2915 VkClearValue clear = {};
2916 clear.color = m_clear_color;
Tony-LunarG73f37032021-06-07 11:47:03 -06002917 std::array<VkClearValue, 4> clear_values = {{clear, clear, clear, clear}};
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002918
2919 // run the renderpass with no dependencies
2920 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002921 vk_testing::RenderPass rp;
2922 vk_testing::Framebuffer fb;
2923 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002924
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002925 fbci.renderPass = rp.handle();
2926 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002927
2928 CreatePipelineHelper g_pipe_0(*this);
2929 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002930 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002931 g_pipe_0.InitState();
2932 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
2933
2934 CreatePipelineHelper g_pipe_12(*this);
2935 g_pipe_12.InitInfo();
2936 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2937 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002938 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002939 g_pipe_12.InitState();
2940 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
2941
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002942 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002943 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
2944
2945 m_commandBuffer->begin();
2946
2947 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2948 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
2949 img_barriers.data());
2950
2951 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
2952 m_renderPassBeginInfo.pClearValues = clear_values.data();
2953 m_renderPassBeginInfo.clearValueCount = clear_values.size();
2954
2955 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002956 m_renderPassBeginInfo.renderPass = rp.handle();
2957 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002958
2959 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2960 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
2961 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
2962 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
2963
2964 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2965
2966 for (uint32_t i = 1; i < subpasses.size(); i++) {
2967 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
2968 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
2969 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
2970 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
2971
2972 // we're racing the writes from subpass 0 with our shader reads
2973 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ-RACING-WRITE");
2974 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2975 m_errorMonitor->VerifyFound();
2976 }
2977
2978 // we should get an error from async checking in both subpasses 2 & 3
2979 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
2980 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
2981 vk::CmdEndRenderPass(m_commandBuffer->handle());
2982 m_errorMonitor->VerifyFound();
2983
2984 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002985 }
2986
2987 // add dependencies from subpass 0 to the others, which are necessary but not sufficient
2988 std::vector<VkSubpassDependency> subpass_dependencies;
2989 for (uint32_t i = 1; i < subpasses.size(); i++) {
2990 VkSubpassDependency dep{0,
2991 i,
2992 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2993 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
2994 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2995 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
2996 0};
2997 subpass_dependencies.push_back(dep);
2998 }
2999 renderpass_info.dependencyCount = subpass_dependencies.size();
3000 renderpass_info.pDependencies = subpass_dependencies.data();
3001
3002 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003003 vk_testing::RenderPass rp;
3004 vk_testing::Framebuffer fb;
3005 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003006
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003007 fbci.renderPass = rp.handle();
3008 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003009
3010 CreatePipelineHelper g_pipe_0(*this);
3011 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003012 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003013 g_pipe_0.InitState();
3014 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3015
3016 CreatePipelineHelper g_pipe_12(*this);
3017 g_pipe_12.InitInfo();
3018 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3019 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003020 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003021 g_pipe_12.InitState();
3022 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3023
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003024 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003025 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3026
3027 m_commandBuffer->begin();
3028
3029 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3030 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3031 img_barriers.data());
3032
3033 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3034 m_renderPassBeginInfo.pClearValues = clear_values.data();
3035 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3036
3037 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003038 m_renderPassBeginInfo.renderPass = rp.handle();
3039 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003040
3041 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3042 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3043 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3044 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3045
3046 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3047
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003048 for (uint32_t i = 1; i < subpasses.size(); i++) {
3049 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3050 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3051 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3052 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
3053 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3054 }
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003055 // expect this error because 2 subpasses could try to do the store operation
3056 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
3057 // ... and this one because the store could happen during a shader read from another subpass
3058 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
3059 vk::CmdEndRenderPass(m_commandBuffer->handle());
3060 m_errorMonitor->VerifyFound();
3061
3062 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003063 }
3064
3065 // try again with correct dependencies to make subpass 3 depend on 1 & 2
3066 for (uint32_t i = 1; i < (subpasses.size() - 1); i++) {
3067 VkSubpassDependency dep{i,
3068 static_cast<uint32_t>(subpasses.size() - 1),
3069 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
3070 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
3071 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
3072 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
3073 0};
3074 subpass_dependencies.push_back(dep);
3075 }
3076 renderpass_info.dependencyCount = subpass_dependencies.size();
3077 renderpass_info.pDependencies = subpass_dependencies.data();
3078 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003079 vk_testing::RenderPass rp;
3080 vk_testing::Framebuffer fb;
3081 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003082
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003083 fbci.renderPass = rp.handle();
3084 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003085
3086 CreatePipelineHelper g_pipe_0(*this);
3087 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003088 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003089 g_pipe_0.InitState();
3090 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3091
3092 CreatePipelineHelper g_pipe_12(*this);
3093 g_pipe_12.InitInfo();
3094 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3095 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003096 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003097 g_pipe_12.InitState();
3098 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3099
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003100 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003101 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3102
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003103 m_commandBuffer->begin();
3104 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3105 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3106 img_barriers.data());
3107
3108 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3109 m_renderPassBeginInfo.pClearValues = clear_values.data();
3110 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3111
3112 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003113 m_renderPassBeginInfo.renderPass = rp.handle();
3114 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003115
3116 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3117 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3118 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3119 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3120
3121 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3122
3123 for (uint32_t i = 1; i < subpasses.size(); i++) {
3124 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3125 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3126 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3127 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
3128 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3129 }
3130
3131 vk::CmdEndRenderPass(m_commandBuffer->handle());
3132
3133 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003134 }
3135}
John Zulauf025ee442020-12-15 11:44:19 -07003136
3137TEST_F(VkSyncValTest, SyncEventsBufferCopy) {
3138 TEST_DESCRIPTION("Check Set/Wait protection for a variety of use cases using buffer copies");
3139 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3140 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3141
3142 VkBufferObj buffer_a;
3143 VkBufferObj buffer_b;
3144 VkBufferObj buffer_c;
3145 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3146 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3147 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3148 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
3149
3150 VkBufferCopy region = {0, 0, 256};
3151 VkBufferCopy front2front = {0, 0, 128};
3152 VkBufferCopy front2back = {0, 128, 128};
3153 VkBufferCopy back2back = {128, 128, 128};
3154
3155 VkEventObj event;
3156 event.init(*m_device, VkEventObj::create_info(0));
3157 VkEvent event_handle = event.handle();
3158
3159 auto cb = m_commandBuffer->handle();
3160 m_commandBuffer->begin();
3161
3162 // Copy after set for WAR (note we are writing to the back half of c but only reading from the front)
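    // Only work recorded before the SetEvent is in the event's first scope; the back-half read of buffer_a
    // recorded after the set is therefore not protected by the wait, and the later write to that range must
    // still be reported as a hazard.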
John Zulauf025ee442020-12-15 11:44:19 -07003163 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3164 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3165 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_c.handle(), 1, &back2back);
3166 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3167 nullptr, 0, nullptr);
3168 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003169 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3170 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
3171 m_errorMonitor->VerifyFound();
3172 m_commandBuffer->end();
3173
3174 // WAR prevented
3175 m_commandBuffer->reset();
3176 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003177 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3178 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3179 // Just protect against WAR, only need a sync barrier.
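    // A WAR hazard needs only an execution dependency (the prior reads must finish before the new write
    // starts), so the wait's stage masks are sufficient and no VkMemoryBarrier is attached.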
3180 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3181 nullptr, 0, nullptr);
3182 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003183
3184 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
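    // A WAW hazard additionally needs a memory dependency to make the first write available and visible;
    // the wait above carries no VkMemoryBarrier, so the copy into buffer_b below should still be flagged.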
3185 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3186 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3187 m_errorMonitor->VerifyFound();
3188 m_commandBuffer->end();
3189
3190 // Prevent WAR and WAW
3191 m_commandBuffer->reset();
3192 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003193 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3194 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003195 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003196 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3197 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3198 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3199 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3200 // The WAW should be safe (on a memory barrier)
3201 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3202 // The WAR should also be safe (on a sync barrier)
3203 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003204 m_commandBuffer->end();
3205
3206 // Barrier range check for WAW
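    // The barrier below covers only the front half of buffer_b (front2front's dstOffset/size), so a
    // post-wait write to the front half is synchronized while a write to the back half should still
    // report WRITE_AFTER_WRITE.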
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003207 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003208 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3209 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3210 buffer_barrier_front_waw.buffer = buffer_b.handle();
3211 buffer_barrier_front_waw.offset = front2front.dstOffset;
3212 buffer_barrier_front_waw.size = front2front.size;
3213
3214 // Front safe, back WAW
3215 m_commandBuffer->reset();
3216 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003217 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3218 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3219 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3220 &buffer_barrier_front_waw, 0, nullptr);
3221 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003222 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3223 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &back2back);
3224 m_errorMonitor->VerifyFound();
3225 m_commandBuffer->end();
3226}
3227
3228TEST_F(VkSyncValTest, SyncEventsCopyImageHazards) {
3229 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3230 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3231
3232 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3233 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3234 VkImageObj image_a(m_device);
3235 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
3236 image_a.Init(image_ci);
3237 ASSERT_TRUE(image_a.initialized());
3238
3239 VkImageObj image_b(m_device);
3240 image_b.Init(image_ci);
3241 ASSERT_TRUE(image_b.initialized());
3242
3243 VkImageObj image_c(m_device);
3244 image_c.Init(image_ci);
3245 ASSERT_TRUE(image_c.initialized());
3246
3247 VkEventObj event;
3248 event.init(*m_device, VkEventObj::create_info(0));
3249 VkEvent event_handle = event.handle();
3250
3251 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
3252 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
3253 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
3254 VkImageSubresourceRange layers_0_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
3255 VkOffset3D zero_offset{0, 0, 0};
3256 VkOffset3D half_offset{64, 64, 0};
3257 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
3258 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
3259
3260 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
3261 VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
3262 VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
3263 VkImageCopy region_0_q0toq0 = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
3264 VkImageCopy region_0_q0toq3 = {layers_0, zero_offset, layers_0, half_offset, half_extent};
3265 VkImageCopy region_0_q3toq3 = {layers_0, half_offset, layers_0, half_offset, half_extent};
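    // "q0" and "q3" refer to the top-left (zero_offset) and bottom-right (half_offset) 64x64 quadrants of layer 0.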
3266
3267 auto cb = m_commandBuffer->handle();
3268 auto copy_general = [cb](const VkImageObj &from, const VkImageObj &to, const VkImageCopy &region) {
3269 vk::CmdCopyImage(cb, from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
3270 };
3271
3272 auto set_layouts = [this, &image_a, &image_b, &image_c]() {
3273 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3274 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3275 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3276 };
3277
John Zulaufdd462092020-12-18 12:00:35 -07003278 // Scope check. One access in, one access not
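    // Only accesses recorded before vkCmdSetEvent are in the event's first scope: the full a->b copy made
    // before the set is protected by the wait, while the quadrant-3 read of image_a made after the set is
    // not, so the final write to that quadrant must still be reported.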
John Zulauf025ee442020-12-15 11:44:19 -07003279 m_commandBuffer->begin();
3280 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003281 copy_general(image_a, image_b, full_region);
3282 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3283 copy_general(image_a, image_c, region_0_q3toq3);
3284 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3285 nullptr, 0, nullptr);
3286 copy_general(image_c, image_a, region_0_q0toq0);
John Zulauf025ee442020-12-15 11:44:19 -07003287 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3288 copy_general(image_c, image_a, region_0_q0toq3);
3289 m_errorMonitor->VerifyFound();
3290 m_commandBuffer->end();
3291
3292 // WAR prevented
3293 m_commandBuffer->reset();
3294 m_commandBuffer->begin();
3295 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003296 copy_general(image_a, image_b, full_region);
3297 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3298 // Just protect against WAR, only need a sync barrier.
3299 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3300 nullptr, 0, nullptr);
3301 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003302
3303 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
3304 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3305 copy_general(image_c, image_b, full_region);
3306 m_errorMonitor->VerifyFound();
3307 m_commandBuffer->end();
3308
3309 // Prevent WAR and WAW
3310 m_commandBuffer->reset();
3311 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003312 set_layouts();
3313 copy_general(image_a, image_b, full_region);
3314 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003315 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003316 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3317 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3318 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3319 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3320 // The WAW should be safe (on a memory barrier)
3321 copy_general(image_c, image_b, full_region);
3322 // The WAR should also be safe (on a sync barrier)
3323 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003324 m_commandBuffer->end();
3325
3326 // Barrier range check for WAW
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003327 auto image_barrier_region0_waw = LvlInitStruct<VkImageMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003328 image_barrier_region0_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3329 image_barrier_region0_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3330 image_barrier_region0_waw.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
3331 image_barrier_region0_waw.newLayout = VK_IMAGE_LAYOUT_GENERAL;
3332 image_barrier_region0_waw.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3333 image_barrier_region0_waw.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3334 image_barrier_region0_waw.image = image_b.handle();
3335 image_barrier_region0_waw.subresourceRange = layers_0_subresource_range;
3336
3337 // Region 0 (layer 0) safe; region 1 (layer 1) WAW
3338 m_commandBuffer->reset();
3339 m_commandBuffer->begin();
3340 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003341 copy_general(image_a, image_b, full_region);
3342 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3343 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3344 nullptr, 1, &image_barrier_region0_waw);
3345 copy_general(image_a, image_b, region_0_to_0);
John Zulauf025ee442020-12-15 11:44:19 -07003346 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3347 copy_general(image_a, image_b, region_1_to_1);
3348 m_errorMonitor->VerifyFound();
3349 m_commandBuffer->end();
3350}
John Zulauf4b5e4632020-12-15 11:48:59 -07003351
3352TEST_F(VkSyncValTest, SyncEventsCommandHazards) {
3353 TEST_DESCRIPTION("Check Set/Reset/Wait command hazard checking");
3354 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3355 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3356
3357 VkEventObj event;
3358 event.init(*m_device, VkEventObj::create_info(0));
3359
3360 const VkEvent event_handle = event.handle();
3361
3362 m_commandBuffer->begin();
John Zulauf4b5e4632020-12-15 11:48:59 -07003363 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003364
John Zulauf4edde622021-02-15 08:54:50 -07003365 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-event-03834");
John Zulauf4b5e4632020-12-15 11:48:59 -07003366 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3367 nullptr, 0, nullptr);
3368 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003369 m_commandBuffer->end();
3370
3371 m_commandBuffer->begin();
3372 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3373 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, nullptr,
3374 0, nullptr, 0, nullptr);
John Zulauf4b5e4632020-12-15 11:48:59 -07003375 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-wait");
3376 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3377 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003378 m_commandBuffer->end();
3379
3380 m_commandBuffer->begin();
3381 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003382 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-reset");
3383 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3384 m_errorMonitor->VerifyFound();
3385
John Zulauf4b5e4632020-12-15 11:48:59 -07003386 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3387 nullptr, 0, nullptr);
3388 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3389 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3390 nullptr, 0, nullptr);
3391 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3392 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3393 nullptr, 0, nullptr);
3394 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003395
3396 // Need a barrier between set and a reset
3397 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-set");
3398 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3399 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003400 m_commandBuffer->end();
3401
3402 m_commandBuffer->begin();
3403 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003404 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-set");
3405 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3406 m_errorMonitor->VerifyFound();
3407
3408 m_commandBuffer->end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003409
3410 // Secondary command buffer events tests
3411 const auto cb = m_commandBuffer->handle();
3412 VkBufferObj buffer_a;
3413 VkBufferObj buffer_b;
3414 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3415 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3416 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3417
3418 VkBufferCopy front2front = {0, 0, 128};
3419
3420 // Barrier range check for WAW
3421 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
3422 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3423 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3424 buffer_barrier_front_waw.buffer = buffer_b.handle();
3425 buffer_barrier_front_waw.offset = front2front.dstOffset;
3426 buffer_barrier_front_waw.size = front2front.size;
3427
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003428 VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
3429 VkCommandBuffer scb1 = secondary_cb1.handle();
3430 secondary_cb1.begin();
3431 secondary_cb1.WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3432 &buffer_barrier_front_waw, 0, nullptr);
3433 vk::CmdCopyBuffer(scb1, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3434 secondary_cb1.end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003435
3436 // One secondary cb hazarding with primary
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003437 m_commandBuffer->reset();
3438 m_commandBuffer->begin();
3439 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003440 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3441 vk::CmdExecuteCommands(cb, 1, &scb1);
3442 m_errorMonitor->VerifyFound();
3443 m_commandBuffer->end();
3444
3445 // One secondary cb sharing event with primary
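    // The primary sets the event before executing the secondary, so the secondary's wait (with its
    // front-half buffer barrier) applies and the repeated front2front copy is synchronized; no error is
    // expected here.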
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003446 m_commandBuffer->reset();
3447 m_commandBuffer->begin();
3448 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3449 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3450 vk::CmdExecuteCommands(cb, 1, &scb1);
3451 m_commandBuffer->end();
John Zulauf4b5e4632020-12-15 11:48:59 -07003452}
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003453
3454TEST_F(VkLayerTest, CmdWaitEvents2KHRUsedButSynchronizaion2Disabled) {
3455 TEST_DESCRIPTION("Using CmdWaitEvents2KHR when synchronization2 is not enabled");
Tony-LunarG53b72e52021-11-19 10:04:40 -07003456 SetTargetApiVersion(VK_API_VERSION_1_3);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003457
Tony-LunarGdf960d42022-01-27 16:13:34 -07003458 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003459 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003460 if (!AreRequiredExtensionsEnabled()) {
3461 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003462 }
sjfricked8e01c52022-07-06 14:09:04 +09003463 ASSERT_NO_FATAL_FAILURE(InitState());
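    // InitState() is called without chaining VkPhysicalDeviceSynchronization2Features, so the
    // synchronization2 feature remains disabled even though the extension is enabled, and the
    // vkCmdWaitEvents2(KHR) calls below should trigger VUID-vkCmdWaitEvents2-synchronization2-03836.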
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003464
Tony-LunarG53b72e52021-11-19 10:04:40 -07003465 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003466 auto fpCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWaitEvents2KHR");
3467
3468 VkEventObj event;
3469 event.init(*m_device, VkEventObj::create_info(0));
3470 VkEvent event_handle = event.handle();
3471
3472 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3473
3474 m_commandBuffer->begin();
Tony-LunarG279601c2021-11-16 10:50:51 -07003475 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003476 fpCmdWaitEvents2KHR(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3477 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003478 if (vulkan_13) {
3479 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
3480 vk::CmdWaitEvents2(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3481 m_errorMonitor->VerifyFound();
3482 }
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003483 m_commandBuffer->end();
3484}
ziga-lunarg15f450d2021-08-26 23:10:05 +02003485
3486TEST_F(VkLayerTest, Sync2FeatureDisabled) {
3487 TEST_DESCRIPTION("Call sync2 functions when the feature is disabled");
3488
Tony-LunarG53b72e52021-11-19 10:04:40 -07003489 SetTargetApiVersion(VK_API_VERSION_1_3);
Tony-LunarGdf960d42022-01-27 16:13:34 -07003490 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003491 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003492 if (!AreRequiredExtensionsEnabled()) {
3493 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg15f450d2021-08-26 23:10:05 +02003494 }
3495
3496 ASSERT_NO_FATAL_FAILURE(InitState());
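    // The device is created without enabling the synchronization2 feature, so each *2(KHR) entry point
    // exercised below should fail with its corresponding ...-synchronization2-... VU.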
3497
Tony-LunarG53b72e52021-11-19 10:04:40 -07003498 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003499 VkPhysicalDeviceSynchronization2FeaturesKHR synchronization2 = LvlInitStruct<VkPhysicalDeviceSynchronization2FeaturesKHR>();
3500 synchronization2.synchronization2 = VK_FALSE; // Invalid
3501 auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&synchronization2);
3502 vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
3503
3504 auto vkCmdPipelineBarrier2KHR =
3505 (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
3506 auto vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdResetEvent2KHR");
3507 auto vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetEvent2KHR");
3508 auto vkCmdWriteTimestamp2KHR =
3509 (PFN_vkCmdWriteTimestamp2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteTimestamp2KHR");
3510
3511 bool timestamp = false;
3512
3513 uint32_t queue_count;
3514 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
3515 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
3516 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
3517 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits > 0) {
3518 timestamp = true;
3519 }
3520
3521 m_commandBuffer->begin();
3522
3523 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3524
Tony-LunarG279601c2021-11-16 10:50:51 -07003525 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003526 vkCmdPipelineBarrier2KHR(m_commandBuffer->handle(), &dependency_info);
3527 m_errorMonitor->VerifyFound();
3528
3529 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
3530 vk_testing::Event event;
3531 event.init(*m_device, eci);
3532
3533 VkPipelineStageFlagBits2KHR stage = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR;
3534
Tony-LunarG279601c2021-11-16 10:50:51 -07003535 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003536 vkCmdResetEvent2KHR(m_commandBuffer->handle(), event.handle(), stage);
3537 m_errorMonitor->VerifyFound();
3538
Tony-LunarG279601c2021-11-16 10:50:51 -07003539 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003540 vkCmdSetEvent2KHR(m_commandBuffer->handle(), event.handle(), &dependency_info);
3541 m_errorMonitor->VerifyFound();
3542
3543 if (timestamp) {
3544 VkQueryPoolCreateInfo qpci = LvlInitStruct<VkQueryPoolCreateInfo>();
3545 qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
3546 qpci.queryCount = 1;
3547
3548 vk_testing::QueryPool query_pool;
3549 query_pool.init(*m_device, qpci);
3550
Tony-LunarG279601c2021-11-16 10:50:51 -07003551 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003552 vkCmdWriteTimestamp2KHR(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3553 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003554 if (vulkan_13) {
3555 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
3556 vk::CmdWriteTimestamp2(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3557 m_errorMonitor->VerifyFound();
3558 }
3559 }
3560 if (vulkan_13) {
3561 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
3562 vk::CmdPipelineBarrier2(m_commandBuffer->handle(), &dependency_info);
3563 m_errorMonitor->VerifyFound();
3564
3565 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
3566 vk::CmdResetEvent2(m_commandBuffer->handle(), event.handle(), stage);
3567 m_errorMonitor->VerifyFound();
3568
3569 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
3570 vk::CmdSetEvent2(m_commandBuffer->handle(), event.handle(), &dependency_info);
3571 m_errorMonitor->VerifyFound();
ziga-lunarg15f450d2021-08-26 23:10:05 +02003572 }
3573
3574 m_commandBuffer->end();
3575}
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003576
3577TEST_F(VkSyncValTest, DestroyedUnusedDescriptors) {
3578 TEST_DESCRIPTION("Verify unused descriptors are ignored and don't crash syncval if they've been destroyed.");
3579 SetTargetApiVersion(VK_API_VERSION_1_1);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003580 AddRequiredExtensions(VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
3581 AddRequiredExtensions(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
3582
3583 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3584
sjfricked700bc02022-05-30 16:35:06 +09003585 if (!AreRequiredExtensionsEnabled()) {
3586 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003587 }
3588
3589 auto indexing_features = LvlInitStruct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
3590 auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>();
3591 features2.pNext = &indexing_features;
3592
3593 auto vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
3594 vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
3595 ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
3596
3597 vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
3598 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
3599 if (!indexing_features.descriptorBindingPartiallyBound) {
3600 printf("%s Partially bound bindings not supported, skipping test\n", kSkipPrefix);
3601 return;
3602 }
3603 if (!indexing_features.descriptorBindingUpdateUnusedWhilePending) {
3604 printf("%s Updating unused while pending is not supported, skipping test\n", kSkipPrefix);
3605 return;
3606 }
3607
3608 ASSERT_NO_FATAL_FAILURE(InitViewport());
3609 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003610
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003611 VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags =
3612 LvlInitStruct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003613 constexpr size_t kNumDescriptors = 6;
3614
3615 std::array<VkDescriptorBindingFlagsEXT, kNumDescriptors> ds_binding_flags;
3616 for (auto &elem : ds_binding_flags) {
3617 elem = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT;
3618 }
3619
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003620 layout_createinfo_binding_flags.bindingCount = ds_binding_flags.size();
3621 layout_createinfo_binding_flags.pBindingFlags = ds_binding_flags.data();
3622
3623 // Prepare descriptors
3624 OneOffDescriptorSet descriptor_set(m_device,
3625 {
3626 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3627 {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3628 {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3629 {3, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3630 {4, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
3631 {5, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3632 },
3633 0, &layout_createinfo_binding_flags, 0);
3634 const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
3635 uint32_t qfi = 0;
3636 auto buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3637 buffer_create_info.size = 32;
3638 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
3639 buffer_create_info.queueFamilyIndexCount = 1;
3640 buffer_create_info.pQueueFamilyIndices = &qfi;
3641
3642 VkBufferObj doit_buffer;
3643 doit_buffer.init(*m_device, buffer_create_info);
3644
3645 auto buffer = layer_data::make_unique<VkBufferObj>();
3646 buffer->init(*m_device, buffer_create_info);
3647
3648 VkDescriptorBufferInfo buffer_info[2] = {};
3649 buffer_info[0].buffer = doit_buffer.handle();
3650 buffer_info[0].offset = 0;
3651 buffer_info[0].range = sizeof(uint32_t);
3652 buffer_info[1].buffer = buffer->handle();
3653 buffer_info[1].offset = 0;
3654 buffer_info[1].range = sizeof(uint32_t);
3655
3656 VkBufferObj texel_buffer;
3657 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
3658 texel_buffer.init(*m_device, buffer_create_info);
3659
3660 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
3661 bvci.buffer = texel_buffer.handle();
3662 bvci.format = VK_FORMAT_R32_SFLOAT;
3663 bvci.offset = 0;
3664 bvci.range = VK_WHOLE_SIZE;
3665
3666 auto texel_bufferview = layer_data::make_unique<vk_testing::BufferView>();
3667 texel_bufferview->init(*m_device, bvci);
3668
3669 auto index_buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3670 index_buffer_create_info.size = sizeof(uint32_t);
3671 index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
3672 VkBufferObj index_buffer;
3673 index_buffer.init(*m_device, index_buffer_create_info);
3674
3675 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3676 VkImageObj sampled_image(m_device);
3677 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3678 sampled_image.Init(image_ci);
3679 auto sampled_view = layer_data::make_unique<vk_testing::ImageView>();
3680 auto imageview_ci = SafeSaneImageViewCreateInfo(sampled_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3681 sampled_view->init(*m_device, imageview_ci);
3682
3683 VkImageObj combined_image(m_device);
3684 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3685 combined_image.Init(image_ci);
3686 imageview_ci = SafeSaneImageViewCreateInfo(combined_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3687 auto combined_view = layer_data::make_unique<vk_testing::ImageView>();
3688 combined_view->init(*m_device, imageview_ci);
3689
3690 vk_testing::Sampler sampler;
3691 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
3692 sampler.init(*m_device, sampler_ci);
3693
3694 VkDescriptorImageInfo image_info[3] = {};
3695 image_info[0].sampler = sampler.handle();
3696 image_info[0].imageView = VK_NULL_HANDLE;
3697 image_info[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3698 image_info[1].sampler = VK_NULL_HANDLE;
3699 image_info[1].imageView = sampled_view->handle();
3700 image_info[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3701 image_info[2].sampler = sampler.handle();
3702 image_info[2].imageView = combined_view->handle();
3703 image_info[2].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3704
3705 // Update all descriptors
3706 std::array<VkWriteDescriptorSet, kNumDescriptors> descriptor_writes;
3707 descriptor_writes[0] = LvlInitStruct<VkWriteDescriptorSet>();
3708 descriptor_writes[0].dstSet = descriptor_set.set_;
3709 descriptor_writes[0].dstBinding = 0;
3710 descriptor_writes[0].descriptorCount = 1;
3711 descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3712 descriptor_writes[0].pBufferInfo = &buffer_info[0];
3713
3714 descriptor_writes[1] = LvlInitStruct<VkWriteDescriptorSet>();
3715 descriptor_writes[1].dstSet = descriptor_set.set_;
3716 descriptor_writes[1].dstBinding = 1;
3717 descriptor_writes[1].descriptorCount = 1;
3718 descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3719 descriptor_writes[1].pBufferInfo = &buffer_info[1];
3720
3721 descriptor_writes[2] = LvlInitStruct<VkWriteDescriptorSet>();
3722 descriptor_writes[2].dstSet = descriptor_set.set_;
3723 descriptor_writes[2].dstBinding = 2;
3724 descriptor_writes[2].descriptorCount = 1;
3725 descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
3726 descriptor_writes[2].pTexelBufferView = &texel_bufferview->handle();
3727
3728 descriptor_writes[3] = LvlInitStruct<VkWriteDescriptorSet>();
3729 descriptor_writes[3].dstSet = descriptor_set.set_;
3730 descriptor_writes[3].dstBinding = 3;
3731 descriptor_writes[3].descriptorCount = 1;
3732 descriptor_writes[3].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
3733 descriptor_writes[3].pImageInfo = &image_info[0];
3734
3735 descriptor_writes[4] = LvlInitStruct<VkWriteDescriptorSet>();
3736 descriptor_writes[4].dstSet = descriptor_set.set_;
3737 descriptor_writes[4].dstBinding = 4;
3738 descriptor_writes[4].descriptorCount = 1;
3739 descriptor_writes[4].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
3740 descriptor_writes[4].pImageInfo = &image_info[1];
3741
3742 descriptor_writes[5] = LvlInitStruct<VkWriteDescriptorSet>();
3743 descriptor_writes[5].dstSet = descriptor_set.set_;
3744 descriptor_writes[5].dstBinding = 5;
3745 descriptor_writes[5].descriptorCount = 1;
3746 descriptor_writes[5].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
3747 descriptor_writes[5].pImageInfo = &image_info[2];
3748
3749 vk::UpdateDescriptorSets(m_device->device(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
3750
3751 // only descriptor 0 is dynamically used; the buffer and view resources backing bindings 1, 2, 4 and 5 are destroyed below
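    // Because every binding uses PARTIALLY_BOUND and UPDATE_UNUSED_WHILE_PENDING, destroying those
    // resources while the set stays bound is legal as long as the shader never dynamically accesses them;
    // syncval is expected to skip the dead descriptors instead of touching destroyed state.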
3752 char const *shader_source = R"glsl(
3753 #version 450
3754 layout(set = 0, binding = 0) uniform foo_0 { int val; } doit;
3755 layout(set = 0, binding = 1) uniform foo_1 { int val; } readit;
3756 layout(set = 0, binding = 2) uniform samplerBuffer texels;
3757 layout(set = 0, binding = 3) uniform sampler samp;
3758 layout(set = 0, binding = 4) uniform texture2D img;
3759 layout(set = 0, binding = 5) uniform sampler2D sampled_image;
3760
3761 void main() {
3762 vec4 x;
3763 vec4 y;
3764 vec4 z;
3765 if (doit.val == 0) {
3766 gl_Position = vec4(0.0);
3767 x = vec4(0.0);
3768 y = vec4(0.0);
3769 z = vec4(0.0);
3770 } else {
3771 gl_Position = vec4(readit.val);
3772 x = texelFetch(texels, 5);
3773 y = texture(sampler2D(img, samp), vec2(0));
3774 z = texture(sampled_image, vec2(0));
3775 }
3776 }
3777 )glsl";
3778
3779 VkShaderObj vs(this, shader_source, VK_SHADER_STAGE_VERTEX_BIT);
3780 VkPipelineObj pipe(m_device);
3781 pipe.AddShader(&vs);
3782 pipe.AddDefaultColorAttachment();
3783 pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003784 VkCommandBufferBeginInfo begin_info = LvlInitStruct<VkCommandBufferBeginInfo>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003785 m_commandBuffer->begin(&begin_info);
3786 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
3787 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
3788 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
3789 &descriptor_set.set_, 0, nullptr);
3790
3791 // destroy resources for the unused descriptors
3792 buffer.reset();
3793 texel_bufferview.reset();
3794 sampled_view.reset();
3795 combined_view.reset();
3796
3797 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
3798 VkViewport viewport = {0, 0, 16, 16, 0, 1};
3799 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
3800 VkRect2D scissor = {{0, 0}, {16, 16}};
3801 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
3802 vk::CmdDrawIndexed(m_commandBuffer->handle(), 1, 1, 0, 0, 0);
3803 vk::CmdEndRenderPass(m_commandBuffer->handle());
3804 m_commandBuffer->end();
3805 m_commandBuffer->QueueCommandBuffer();
3806 vk::QueueWaitIdle(m_device->m_queue);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003807}
ziga-lunargc71f1a92022-03-23 23:08:35 +01003808
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003809TEST_F(VkSyncValTest, TestInvalidExternalSubpassDependency) {
3810 TEST_DESCRIPTION("Test write after write hazard with invalid external subpass dependency");
3811
3812 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3813 ASSERT_NO_FATAL_FAILURE(InitState());
3814
3815 VkSubpassDependency subpass_dependency = {};
3816 subpass_dependency.srcSubpass = 0;
3817 subpass_dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
3818 subpass_dependency.srcStageMask = 0;
3819 subpass_dependency.dstStageMask = 0;
3820 subpass_dependency.srcAccessMask = 0;
3821 subpass_dependency.dstAccessMask = 0;
3822 subpass_dependency.dependencyFlags = 0;
3823
3824 VkAttachmentReference attach_ref1 = {};
3825 attach_ref1.attachment = 0;
3826 attach_ref1.layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
3827 VkAttachmentReference attach_ref2 = {};
3828 attach_ref2.attachment = 0;
3829 attach_ref2.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3830
3831 VkSubpassDescription subpass_descriptions[2] = {};
3832 subpass_descriptions[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3833 subpass_descriptions[0].pDepthStencilAttachment = &attach_ref1;
3834 subpass_descriptions[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3835 subpass_descriptions[1].pDepthStencilAttachment = &attach_ref2;
3836
3837 VkAttachmentDescription attachment_description = {};
3838 attachment_description.format = VK_FORMAT_D32_SFLOAT;
3839 attachment_description.samples = VK_SAMPLE_COUNT_1_BIT;
3840 attachment_description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
3841 attachment_description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
3842 attachment_description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
3843 attachment_description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
3844 attachment_description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
3845 attachment_description.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
3846
3847 auto rp_ci = LvlInitStruct<VkRenderPassCreateInfo>();
3848 rp_ci.subpassCount = 1;
3849 rp_ci.pSubpasses = subpass_descriptions;
3850 rp_ci.attachmentCount = 1;
3851 rp_ci.pAttachments = &attachment_description;
3852 rp_ci.dependencyCount = 1;
3853 rp_ci.pDependencies = &subpass_dependency;
3854
3855 vk_testing::RenderPass render_pass;
3856 render_pass.init(*m_device, rp_ci);
3857
3858 VkClearValue clear_value = {};
3859 clear_value.color = {{0, 0, 0, 0}};
3860
3861 VkImageCreateInfo image_ci = LvlInitStruct<VkImageCreateInfo>();
3862 image_ci.imageType = VK_IMAGE_TYPE_2D;
3863 image_ci.format = VK_FORMAT_D32_SFLOAT;
3864 image_ci.extent.width = 32;
3865 image_ci.extent.height = 32;
3866 image_ci.extent.depth = 1;
3867 image_ci.mipLevels = 1;
3868 image_ci.arrayLayers = 1;
3869 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
3870 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
3871 image_ci.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3872
3873 VkImageObj image1(m_device);
3874 image1.init(&image_ci);
3875 ASSERT_TRUE(image1.initialized());
3876
3877 vk_testing::ImageView image_view1;
3878 VkImageViewCreateInfo iv_ci = LvlInitStruct<VkImageViewCreateInfo>();
3879 iv_ci.image = image1.handle();
3880 iv_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
3881 iv_ci.format = VK_FORMAT_D32_SFLOAT;
3882 iv_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
3883 iv_ci.subresourceRange.baseMipLevel = 0;
3884 iv_ci.subresourceRange.levelCount = 1;
3885 iv_ci.subresourceRange.baseArrayLayer = 0;
3886 iv_ci.subresourceRange.layerCount = 1;
3887 image_view1.init(*m_device, iv_ci);
3888
3889 VkImageView framebuffer_attachments[1] = {image_view1.handle()};
3890
3891 auto fb_ci = LvlInitStruct<VkFramebufferCreateInfo>();
3892 fb_ci.renderPass = render_pass.handle();
3893 fb_ci.attachmentCount = 1;
3894 fb_ci.pAttachments = framebuffer_attachments;
3895 fb_ci.width = 32;
3896 fb_ci.height = 32;
3897 fb_ci.layers = 1;
3898
3899 vk_testing::Framebuffer framebuffer;
3900 framebuffer.init(*m_device, fb_ci);
3901
3902 auto rp_bi = LvlInitStruct<VkRenderPassBeginInfo>();
3903 rp_bi.renderPass = render_pass.handle();
3904 rp_bi.framebuffer = framebuffer.handle();
3905 rp_bi.renderArea.extent.width = 32;
3906 rp_bi.renderArea.extent.height = 32;
3907 rp_bi.clearValueCount = 1;
3908 rp_bi.pClearValues = &clear_value;
3909
3910 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
3911 ds_ci.depthTestEnable = VK_FALSE;
3912 ds_ci.depthWriteEnable = VK_FALSE;
3913 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
3914
3915 CreatePipelineHelper pipe(*this);
3916 pipe.InitInfo();
3917 pipe.gp_ci_.renderPass = render_pass.handle();
3918 pipe.gp_ci_.pDepthStencilState = &ds_ci;
3919 pipe.InitState();
3920 ASSERT_VK_SUCCESS(pipe.CreateGraphicsPipeline());
3921
3922 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3923
3924 m_commandBuffer->begin();
3925 m_commandBuffer->BeginRenderPass(rp_bi);
3926 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
3927 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3928 m_commandBuffer->EndRenderPass();
3929 m_commandBuffer->end();
3930
3931 m_errorMonitor->VerifyFound();
3932}
3933
ziga-lunargc71f1a92022-03-23 23:08:35 +01003934TEST_F(VkSyncValTest, TestCopyingToCompressedImage) {
3935 TEST_DESCRIPTION("Copy from uncompressed to compressed image with and without overlap.");
3936
3937 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3938 bool copy_commands_2 = false;
3939 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME)) {
3940 m_device_extension_names.push_back(VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME);
3941 copy_commands_2 = true;
3942 }
3943 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3944
3945 VkFormatProperties format_properties;
3946 VkFormat mp_format = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
3947 vk::GetPhysicalDeviceFormatProperties(gpu(), mp_format, &format_properties);
3948 if ((format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0) {
3949 printf(
3950 "%s Device does not support VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT for VK_FORMAT_BC1_RGBA_UNORM_BLOCK, skipping test.\n",
3951 kSkipPrefix);
3952 return;
3953 }
3954
3955 VkImageObj src_image(m_device);
3956 src_image.Init(1, 1, 1, VK_FORMAT_R32G32_UINT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_LINEAR);
3957 VkImageObj dst_image(m_device);
3958 dst_image.Init(12, 4, 1, VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_LINEAR);
3959
3960 VkImageCopy copy_regions[2] = {};
3961 copy_regions[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3962 copy_regions[0].srcSubresource.mipLevel = 0;
3963 copy_regions[0].srcSubresource.baseArrayLayer = 0;
3964 copy_regions[0].srcSubresource.layerCount = 1;
3965 copy_regions[0].srcOffset = {0, 0, 0};
3966 copy_regions[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3967 copy_regions[0].dstSubresource.mipLevel = 0;
3968 copy_regions[0].dstSubresource.baseArrayLayer = 0;
3969 copy_regions[0].dstSubresource.layerCount = 1;
3970 copy_regions[0].dstOffset = {0, 0, 0};
3971 copy_regions[0].extent = {1, 1, 1};
3972 copy_regions[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3973 copy_regions[1].srcSubresource.mipLevel = 0;
3974 copy_regions[1].srcSubresource.baseArrayLayer = 0;
3975 copy_regions[1].srcSubresource.layerCount = 1;
3976 copy_regions[1].srcOffset = {0, 0, 0};
3977 copy_regions[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3978 copy_regions[1].dstSubresource.mipLevel = 0;
3979 copy_regions[1].dstSubresource.baseArrayLayer = 0;
3980 copy_regions[1].dstSubresource.layerCount = 1;
3981 copy_regions[1].dstOffset = {4, 0, 0};
3982 copy_regions[1].extent = {1, 1, 1};
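    // BC1 blocks are 4x4 texels, and for an uncompressed->compressed copy the {1,1,1} extent is in source
    // texels, which corresponds to one whole block in the destination. dstOffset {4,0,0} therefore writes a
    // block disjoint from the one at {0,0,0}, while the {7,0,0} offset used later overlaps the block written
    // at x=4..7 and is expected to produce WRITE_AFTER_WRITE.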
3983
3984 m_commandBuffer->begin();
3985
ziga-lunargc71f1a92022-03-23 23:08:35 +01003986 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3987 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[0]);
3988 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3989 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
ziga-lunargc71f1a92022-03-23 23:08:35 +01003990 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3991 copy_regions[1].dstOffset = {7, 0, 0};
3992 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3993 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
3994 m_errorMonitor->VerifyFound();
3995
3996 m_commandBuffer->end();
3997
3998 if (copy_commands_2) {
3999 auto vkCmdCopyImage2KHR =
4000 reinterpret_cast<PFN_vkCmdCopyImage2KHR>(vk::GetInstanceProcAddr(instance(), "vkCmdCopyImage2KHR"));
4001 assert(vkCmdCopyImage2KHR != nullptr);
4002
4003 m_commandBuffer->reset();
4004
4005 VkImageCopy2KHR copy_regions2[2];
4006 copy_regions2[0] = LvlInitStruct<VkImageCopy2KHR>();
4007 copy_regions2[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4008 copy_regions2[0].srcSubresource.mipLevel = 0;
4009 copy_regions2[0].srcSubresource.baseArrayLayer = 0;
4010 copy_regions2[0].srcSubresource.layerCount = 1;
4011 copy_regions2[0].srcOffset = {0, 0, 0};
4012 copy_regions2[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4013 copy_regions2[0].dstSubresource.mipLevel = 0;
4014 copy_regions2[0].dstSubresource.baseArrayLayer = 0;
4015 copy_regions2[0].dstSubresource.layerCount = 1;
4016 copy_regions2[0].dstOffset = {0, 0, 0};
4017 copy_regions2[0].extent = {1, 1, 1};
4018 copy_regions2[1] = LvlInitStruct<VkImageCopy2KHR>();
4019 copy_regions2[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4020 copy_regions2[1].srcSubresource.mipLevel = 0;
4021 copy_regions2[1].srcSubresource.baseArrayLayer = 0;
4022 copy_regions2[1].srcSubresource.layerCount = 1;
4023 copy_regions2[1].srcOffset = {0, 0, 0};
4024 copy_regions2[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4025 copy_regions2[1].dstSubresource.mipLevel = 0;
4026 copy_regions2[1].dstSubresource.baseArrayLayer = 0;
4027 copy_regions2[1].dstSubresource.layerCount = 1;
4028 copy_regions2[1].dstOffset = {4, 0, 0};
4029 copy_regions2[1].extent = {1, 1, 1};
4030
4031 auto copy_image_info = LvlInitStruct<VkCopyImageInfo2KHR>();
4032 copy_image_info.srcImage = src_image.handle();
4033 copy_image_info.srcImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4034 copy_image_info.dstImage = dst_image.handle();
4035 copy_image_info.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4036 copy_image_info.regionCount = 2;
4037 copy_image_info.pRegions = copy_regions2;
4038
4039 m_commandBuffer->begin();
4040
ziga-lunargc71f1a92022-03-23 23:08:35 +01004041 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
ziga-lunargc71f1a92022-03-23 23:08:35 +01004042 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4043 copy_image_info.regionCount = 1;
4044 copy_image_info.pRegions = &copy_regions2[1];
4045 copy_regions2[1].dstOffset = {7, 0, 0};
4046 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
4047 m_errorMonitor->VerifyFound();
4048
4049 m_commandBuffer->end();
4050 }
4051}
John Zulaufd79e34f2022-04-20 16:39:59 -06004052
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004053TEST_F(VkSyncValTest, StageAccessExpansion) {
4054 SetTargetApiVersion(VK_API_VERSION_1_2);
4055
4056 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
4057 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4058 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
4059
4060 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
4061 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4062 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4063 VkImageObj image_c_a(m_device), image_c_b(m_device);
4064 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
4065 image_c_a.Init(image_c_ci);
4066 image_c_b.Init(image_c_ci);
4067
4068 VkImageView imageview_c = image_c_a.targetView(format);
4069 VkImageUsageFlags image_usage_storage =
4070 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4071 VkImageObj image_s_a(m_device), image_s_b(m_device);
4072 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
4073 image_s_a.Init(image_s_ci);
4074 image_s_b.Init(image_s_ci);
4075 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4076 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4077
4078 VkImageView imageview_s = image_s_a.targetView(format);
4079
4080 vk_testing::Sampler sampler_s, sampler_c;
4081 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
4082 sampler_s.init(*m_device, sampler_ci);
4083 sampler_c.init(*m_device, sampler_ci);
4084
4085 VkBufferObj buffer_a, buffer_b;
4086 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4087 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
4088 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4089 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
4090 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
4091
4092 vk_testing::BufferView bufferview;
4093 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
4094 bvci.buffer = buffer_a.handle();
4095 bvci.format = VK_FORMAT_R32_SFLOAT;
4096 bvci.offset = 0;
4097 bvci.range = VK_WHOLE_SIZE;
4098
4099 bufferview.init(*m_device, bvci);
4100
4101 OneOffDescriptorSet descriptor_set(m_device,
4102 {
4103 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4104 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
4105 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
4106 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4107 });
4108
4109 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
4110 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
4111 VK_IMAGE_LAYOUT_GENERAL);
4112 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
4113 VK_IMAGE_LAYOUT_GENERAL);
4114 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
4115 descriptor_set.UpdateDescriptorSets();
4116
4117 // Shader source (used as a fragment shader below)
4118 std::string csSource = R"glsl(
4119 #version 450
4120 layout(set=0, binding=0) uniform foo { float x; } ub0;
4121 layout(set=0, binding=1) uniform sampler2D cis1;
4122 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
4123 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
4124 void main(){
4125 vec4 vColor4;
4126 vColor4.x = ub0.x;
4127 vColor4 = texture(cis1, vec2(0));
4128 vColor4 = imageLoad(si2, ivec2(0));
4129 vColor4 = imageLoad(stb3, 0);
4130 }
4131 )glsl";
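    // Each of the four descriptor reads above lands as a VK_ACCESS_SHADER_READ_BIT access at
    // VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT once this source is bound as a fragment shader, which is the
    // stage/access pairing the barriers below must (and in the first pass deliberately fail to) cover.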
4132
4133 // Draw
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004134 const float vbo_data[3] = {1.f, 0.f, 1.f};
4135 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
4136 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
4137 VkBufferObj vbo, vbo2;
4138 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4139 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4140 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4141
4142 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
Nathaniel Cesario2c8e1942022-06-21 09:15:19 -06004143 VkShaderObj fs(this, csSource.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004144
4145 CreatePipelineHelper g_pipe(*this);
4146 g_pipe.InitInfo();
4147 g_pipe.InitState();
4148 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
4149 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
4150 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
4151 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
4152 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
4153 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
4154 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
4155
4156 m_commandBuffer->reset();
4157 m_commandBuffer->begin();
4158 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4159 VkOffset3D zero_offset{0, 0, 0};
4160 VkExtent3D full_extent{16, 16, 1};
4161 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
4162 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4163 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4164 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4165 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4166
4167 auto barrier = LvlInitStruct<VkMemoryBarrier>();
4168 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4169 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
4170
4171 // wrong: dst stage should be VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
4172 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
4173 &barrier, 0, nullptr, 0, nullptr);
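    // For reference, the corrected form used in the second pass of this test keeps the same VkMemoryBarrier but
    // names the stage that actually performs the shader reads:
    //
    //     vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT,
    //                            VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 1, &barrier, 0, nullptr, 0, nullptr);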
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004174
4175 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4176 VkDeviceSize offset = 0;
4177 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4178
4179 VkViewport viewport = {0, 0, 16, 16, 0, 1};
4180 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4181 VkRect2D scissor = {{0, 0}, {16, 16}};
4182 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4183
4184 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4185 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4186 &descriptor_set.set_, 0, nullptr);
4187
4188 // one error for each image copied above
4189 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4190 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4191 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4192 m_errorMonitor->VerifyFound();
4193
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004194 m_commandBuffer->EndRenderPass();
4195 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004196
4197 // Try again with the correct dst stage on the barrier
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004198 m_commandBuffer->reset();
4199 m_commandBuffer->begin();
4200 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4201 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4202 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4203 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4204
4205 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 1,
4206 &barrier, 0, nullptr, 0, nullptr);
4207
4208 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4209 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4210
4211 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4212 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4213
4214 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4215 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4216 &descriptor_set.set_, 0, nullptr);
4217 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4218 m_commandBuffer->EndRenderPass();
4219 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004220}
4221
John Zulaufb66ee052022-06-10 16:52:28 -06004222struct QSTestContext {
John Zulaufc55f4702022-07-15 12:16:34 -06004223 VkDeviceObj* dev;
John Zulaufb66ee052022-06-10 16:52:28 -06004224 uint32_t q_fam = ~0U;
John Zulauf6df2d5c2022-05-28 13:02:21 -06004225 VkQueue q0 = VK_NULL_HANDLE;
4226 VkQueue q1 = VK_NULL_HANDLE;
John Zulaufb66ee052022-06-10 16:52:28 -06004227
4228 VkBufferObj buffer_a;
4229 VkBufferObj buffer_b;
4230 VkBufferObj buffer_c;
4231
4232 VkBufferCopy region;
4233 VkCommandPoolObj pool;
4234
4235 VkCommandBufferObj cba;
4236 VkCommandBufferObj cbb;
4237 VkCommandBufferObj cbc;
4238
4239 VkCommandBuffer h_cba = VK_NULL_HANDLE;
4240 VkCommandBuffer h_cbb = VK_NULL_HANDLE;
4241 VkCommandBuffer h_cbc = VK_NULL_HANDLE;
4242
4243 vk_testing::Semaphore semaphore;
4244 vk_testing::Event event;
4245
4246 VkCommandBufferObj* current_cb = nullptr;
4247
John Zulaufc55f4702022-07-15 12:16:34 -06004248 QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0 = nullptr, VkQueueObj* force_q1 = nullptr);
John Zulaufaa7ee262022-08-01 18:10:28 -06004249 VkCommandBuffer InitFromPool(VkCommandBufferObj& cb_obj);
John Zulaufb66ee052022-06-10 16:52:28 -06004250 bool Valid() const { return q1 != VK_NULL_HANDLE; }
4251
4252 void Begin(VkCommandBufferObj& cb);
4253 void BeginA() { Begin(cba); }
4254 void BeginB() { Begin(cbb); }
4255 void BeginC() { Begin(cbc); }
4256
4257 void End();
4258
4259 void CopyAToB() { vk::CmdCopyBuffer(current_cb->handle(), buffer_a.handle(), buffer_b.handle(), 1, &region); }
4260 void CopyAToC() { vk::CmdCopyBuffer(current_cb->handle(), buffer_a.handle(), buffer_c.handle(), 1, &region); }
4261
4262 void CopyBToA() { vk::CmdCopyBuffer(current_cb->handle(), buffer_b.handle(), buffer_a.handle(), 1, &region); }
4263 void CopyBToC() { vk::CmdCopyBuffer(current_cb->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region); }
4264
4265 void CopyCToA() { vk::CmdCopyBuffer(current_cb->handle(), buffer_c.handle(), buffer_a.handle(), 1, &region); }
4266 void CopyCToB() { vk::CmdCopyBuffer(current_cb->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region); }
4267
John Zulauf46f5d6b2022-06-30 12:38:34 -06004268 void CopyGeneral(const VkImageObj& from, const VkImageObj& to, const VkImageCopy& region) {
4269 vk::CmdCopyImage(current_cb->handle(), from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
4270 &region);
4271 };
4272
John Zulaufb66ee052022-06-10 16:52:28 -06004273 VkBufferMemoryBarrier InitBufferBarrier(const VkBufferObj& buffer);
4274 void TransferBarrier(const VkBufferObj& buffer);
4275 void TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier);
4276
John Zulaufc55f4702022-07-15 12:16:34 -06004277 void Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004278 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4279 VkFence fence = VK_NULL_HANDLE);
John Zulaufb66ee052022-06-10 16:52:28 -06004280
John Zulaufc55f4702022-07-15 12:16:34 -06004281 void Submit0(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004282 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4283 VkFence fence = VK_NULL_HANDLE) {
4284 Submit(q0, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004285 }
4286 void Submit0Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit0(cb, semaphore.handle(), wait_mask); }
4287 void Submit0Signal(VkCommandBufferObj& cb) { Submit0(cb, VK_NULL_HANDLE, 0U, semaphore.handle()); }
4288
John Zulaufc55f4702022-07-15 12:16:34 -06004289 void Submit1(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004290 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4291 VkFence fence = VK_NULL_HANDLE) {
4292 Submit(q1, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004293 }
4294 void Submit1Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit1(cb, semaphore.handle(), wait_mask); }
4295 void Submit1Signal(VkCommandBufferObj& cb, VkPipelineStageFlags signal_mask) {
4296 Submit1(cb, VK_NULL_HANDLE, 0U, semaphore.handle());  // signal_mask is unused: VkSubmitInfo has no signal stage mask
4297 }
4298 void SetEvent(VkPipelineStageFlags src_mask) { event.cmd_set(*current_cb, src_mask); }
4299 void WaitEventBufferTransfer(VkBufferObj& buffer, VkPipelineStageFlags src_mask, VkPipelineStageFlags dst_mask) {
4300 std::vector<VkBufferMemoryBarrier> buffer_barriers(1, InitBufferBarrier(buffer));
4301 event.cmd_wait(*current_cb, src_mask, dst_mask, std::vector<VkMemoryBarrier>(), buffer_barriers,
4302 std::vector<VkImageMemoryBarrier>());
4303 }
John Zulaufc55f4702022-07-15 12:16:34 -06004304 void QueueWait(VkQueue q) { vk::QueueWaitIdle(q); }
4305 void QueueWait0() { QueueWait(q0); }
4306 void QueueWait1() { QueueWait(q1); }
4307 void DeviceWait() { vk::DeviceWaitIdle(dev->handle()); }
John Zulaufb66ee052022-06-10 16:52:28 -06004308};
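// A minimal sketch of how the tests below typically drive QSTestContext (all helpers shown are declared above and
// implemented below):
//
//     QSTestContext test(m_device);  // finds two same-family transfer-capable queues, otherwise !test.Valid()
//     test.BeginA(); test.CopyAToB(); test.End();
//     test.BeginB(); test.CopyCToA(); test.End();
//     test.Submit0Signal(test.cba);                                // submit on q0, signalling the context semaphore
//     test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);  // submit on q1, waiting on that semaphore
//     test.DeviceWait();                                           // quiesce before the next subcase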
4309
John Zulaufc55f4702022-07-15 12:16:34 -06004310QSTestContext::QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0, VkQueueObj* force_q1)
4311 : dev(device), q0(VK_NULL_HANDLE), q1(VK_NULL_HANDLE) {
4312 if (force_q0) {
4313 q0 = force_q0->handle();
4314 q_fam = force_q0->get_family_index();
4315 if (force_q1) {
4316 // The object has some assumptions that the queues are from the same family, so enforce this here
4317 if (force_q1->get_family_index() == q_fam) {
4318 q1 = force_q1->handle();
4319 }
4320 } else {
4321 q1 = q0; // Allow the two queues to be the same and valid if forced
4322 }
4323 } else {
4324 const auto& queues = device->dma_queues();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004325
John Zulaufc55f4702022-07-15 12:16:34 -06004326 const uint32_t q_count = static_cast<uint32_t>(queues.size());
4327 for (uint32_t q0_index = 0; q0_index < q_count; ++q0_index) {
4328 const auto* q0_entry = queues[q0_index];
4329 q0 = q0_entry->handle();
4330 q_fam = q0_entry->get_family_index();
4331 for (uint32_t q1_index = (q0_index + 1); q1_index < q_count; ++q1_index) {
4332 const auto* q1_entry = queues[q1_index];
4333 if (q_fam == q1_entry->get_family_index()) {
4334 q1 = q1_entry->handle();
4335 break;
4336 }
4337 }
4338 if (Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004339 break;
4340 }
4341 }
John Zulauf6df2d5c2022-05-28 13:02:21 -06004342 }
John Zulaufc55f4702022-07-15 12:16:34 -06004343
John Zulaufb66ee052022-06-10 16:52:28 -06004344 if (!Valid()) return;
4345
4346 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4347 buffer_a.init_as_src_and_dst(*device, 256, mem_prop);
4348 buffer_b.init_as_src_and_dst(*device, 256, mem_prop);
4349 buffer_c.init_as_src_and_dst(*device, 256, mem_prop);
4350
4351 region = {0, 0, 256};
4352
4353 pool.Init(device, q_fam, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004354
John Zulaufaa7ee262022-08-01 18:10:28 -06004355 h_cba = InitFromPool(cba);
4356 h_cbb = InitFromPool(cbb);
4357 h_cbc = InitFromPool(cbc);
John Zulaufb66ee052022-06-10 16:52:28 -06004358
4359 auto semaphore_ci = LvlInitStruct<VkSemaphoreCreateInfo>();
4360 semaphore.init(*device, semaphore_ci);
4361
4362 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
4363 event.init(*device, eci);
4364}
4365
John Zulaufaa7ee262022-08-01 18:10:28 -06004366VkCommandBuffer QSTestContext::InitFromPool(VkCommandBufferObj& cb_obj) {
4367 cb_obj.Init(dev, &pool);
4368 return cb_obj.handle();
4369}
4370
John Zulaufb66ee052022-06-10 16:52:28 -06004371void QSTestContext::Begin(VkCommandBufferObj& cb) {
John Zulaufc55f4702022-07-15 12:16:34 -06004372 VkCommandBufferBeginInfo info = LvlInitStruct<VkCommandBufferBeginInfo>();
4373 info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4374 info.pInheritanceInfo = nullptr;
4375
John Zulaufb66ee052022-06-10 16:52:28 -06004376 cb.reset();
John Zulaufc55f4702022-07-15 12:16:34 -06004377 cb.begin(&info);
John Zulaufb66ee052022-06-10 16:52:28 -06004378 current_cb = &cb;
4379}
4380
4381void QSTestContext::End() {
4382 current_cb->end();
4383 current_cb = nullptr;
4384}
4385
4386VkBufferMemoryBarrier QSTestContext::InitBufferBarrier(const VkBufferObj& buffer) {
4387 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
4388 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
4389 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4390 buffer_barrier.buffer = buffer.handle();
4391 buffer_barrier.offset = 0;
4392 buffer_barrier.size = 256;
4393 return buffer_barrier;
4394}
4395
4396void QSTestContext::TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier) {
4397 vk::CmdPipelineBarrier(current_cb->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1,
4398 &buffer_barrier, 0, nullptr);
4399}
4400
4401void QSTestContext::TransferBarrier(const VkBufferObj& buffer) { TransferBarrier(InitBufferBarrier(buffer)); }
4402
John Zulaufaa7ee262022-08-01 18:10:28 -06004403void QSTestContext::Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait, VkPipelineStageFlags wait_mask, VkSemaphore signal,
4404 VkFence fence) {
John Zulaufb66ee052022-06-10 16:52:28 -06004405 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4406 submit1.commandBufferCount = 1;
4407 VkCommandBuffer h_cb = cb.handle();
4408 submit1.pCommandBuffers = &h_cb;
4409 if (wait != VK_NULL_HANDLE) {
4410 submit1.waitSemaphoreCount = 1;
4411 submit1.pWaitSemaphores = &wait;
4412 submit1.pWaitDstStageMask = &wait_mask;
4413 }
4414 if (signal != VK_NULL_HANDLE) {
4415 submit1.signalSemaphoreCount = 1;
4416 submit1.pSignalSemaphores = &signal;
4417 }
John Zulaufaa7ee262022-08-01 18:10:28 -06004418 vk::QueueSubmit(q, 1, &submit1, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004419}
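// The wait_mask passed to the Submit* helpers becomes pWaitDstStageMask, i.e. the second synchronization scope of
// the semaphore wait. The tests below rely on two flavors of that scope (both calls appear verbatim later on):
//
//     test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);  // wait-for-nothing: no later stage is ordered
//     test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);        // orders the copies (and chaining barriers)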
4420
John Zulaufb9fad9f2022-07-15 11:10:37 -06004421TEST_F(VkSyncValTest, SyncQSBufferCopyHazards) {
4422 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4423 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4424
4425 VkBufferObj buffer_a;
4426 VkBufferObj buffer_b;
4427 VkBufferObj buffer_c;
4428 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4429 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
4430 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
4431 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
4432
4433 VkBufferCopy region = {0, 0, 256};
4434
4435 VkCommandBufferObj cba(m_device, m_commandPool);
4436 VkCommandBufferObj cbb(m_device, m_commandPool);
4437
4438 cba.begin();
4439 const VkCommandBuffer h_cba = cba.handle();
4440 vk::CmdCopyBuffer(h_cba, buffer_a.handle(), buffer_b.handle(), 1, &region);
4441 cba.end();
4442
4443 const VkCommandBuffer h_cbb = cbb.handle();
4444 cbb.begin();
4445 vk::CmdCopyBuffer(h_cbb, buffer_c.handle(), buffer_a.handle(), 1, &region);
4446 cbb.end();
4447
4448 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4449 submit1.commandBufferCount = 2;
4450 VkCommandBuffer two_cbs[2] = {h_cba, h_cbb};
4451 submit1.pCommandBuffers = two_cbs;
4452
4453 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4454 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
4455 m_errorMonitor->VerifyFound();
4456
4457 vk::DeviceWaitIdle(m_device->device());
4458
4459 VkSubmitInfo submit2[2] = {lvl_init_struct<VkSubmitInfo>(), lvl_init_struct<VkSubmitInfo>()};
4460 submit2[0].commandBufferCount = 1;
4461 submit2[0].pCommandBuffers = &h_cba;
4462 submit2[1].commandBufferCount = 1;
4463 submit2[1].pCommandBuffers = &h_cbb;
4464 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4465 vk::QueueSubmit(m_device->m_queue, 2, submit2, VK_NULL_HANDLE);
4466 m_errorMonitor->VerifyFound();
4467
4468 // With the skip settings, the above QueueSubmits didn't record, so we can treat the global queue contexts as empty
4469 submit1.commandBufferCount = 1;
4470 submit1.pCommandBuffers = &h_cba;
4471 // Submit A
John Zulaufb9fad9f2022-07-15 11:10:37 -06004472 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004473
4474 submit1.pCommandBuffers = &h_cbb;
4475 // Submit B -- which should conflict via the queue's "last batch"
4476 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4477 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
4478 m_errorMonitor->VerifyFound();
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004479
4480 m_device->wait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004481}
4482
4483TEST_F(VkSyncValTest, SyncQSBufferCopyVsIdle) {
4484 // TODO (jzulauf)
John Zulaufc55f4702022-07-15 12:16:34 -06004485 // GTEST_SKIP() << "this test is causing a sporadic crash on nvidia 32b release. Skip until further investigation";
John Zulaufb9fad9f2022-07-15 11:10:37 -06004486
4487 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4488 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4489
John Zulaufc55f4702022-07-15 12:16:34 -06004490 QSTestContext test(m_device, m_device->m_queue_obj);
4491 if (!test.Valid()) {
4492 GTEST_SKIP() << "Test requires a valid queue object.";
4493 }
John Zulaufb9fad9f2022-07-15 11:10:37 -06004494
John Zulaufc55f4702022-07-15 12:16:34 -06004495 test.BeginA();
4496 test.CopyAToB();
4497 test.End();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004498
John Zulaufc55f4702022-07-15 12:16:34 -06004499 test.BeginB();
4500 test.CopyCToA();
4501 test.End();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004502
4503 // Submit A
John Zulaufc55f4702022-07-15 12:16:34 -06004504 test.Submit0(test.cba);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004505
4506 // Submit B which hazards vs. A
John Zulaufb9fad9f2022-07-15 11:10:37 -06004507 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004508 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004509 m_errorMonitor->VerifyFound();
4510
4511 // With the skip settings, the above QueueSubmits didn't record, so we can treat the previous submit as not
4512 // having happened. So we'll try again with a device wait idle
4513 // Submit B again, but after idling, which should remove the hazard
John Zulaufc55f4702022-07-15 12:16:34 -06004514 test.DeviceWait();
4515 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004516
John Zulaufc55f4702022-07-15 12:16:34 -06004517 // Submit the same command buffer again for another hazard
John Zulaufb9fad9f2022-07-15 11:10:37 -06004518 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulaufc55f4702022-07-15 12:16:34 -06004519 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004520 m_errorMonitor->VerifyFound();
4521
4522 // With the skip settings, the above QueueSubmits didn't record, so we can treat the previous submit as not
4523 // having happened. So we'll try again with a queue wait idle
4524 // Submit B again, but after idling, which should remove the hazard
John Zulaufc55f4702022-07-15 12:16:34 -06004525 test.QueueWait0();
4526 test.Submit0(test.cbb);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004527
4528 m_device->wait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004529}
4530
John Zulaufaa7ee262022-08-01 18:10:28 -06004531TEST_F(VkSyncValTest, SyncQSBufferCopyVsFence) {
4532 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4533 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4534
4535 QSTestContext test(m_device, m_device->m_queue_obj);
4536 if (!test.Valid()) {
4537 GTEST_SKIP() << "Test requires a valid queue object.";
4538 }
4539
4540 vk_testing::Fence fence;
4541 fence.init(*m_device, VkFenceObj::create_info());
4542 VkFence fence_handle = fence.handle();
4543 VkResult wait_result;
4544 VkCommandBufferObj cbd;
4545 test.InitFromPool(cbd);
4546
4547 // Set up four CBs with copy commands
4548 // We'll wait for the first, but not the second
4549 test.BeginA();
4550 test.CopyAToB();
4551 test.End();
4552
4553 test.BeginB();
4554 test.CopyAToC();
4555 test.End();
4556
4557 test.BeginC();
4558 test.CopyAToB();
4559 test.End();
4560
4561 // This is the one that should error
4562 test.Begin(cbd);
4563 test.CopyAToC();
4564 test.End();
4565
4566 // Two copies *better* finish well within this timeout...
4567 const uint64_t kFourSeconds = 4ull * 1000ull * 1000ull * 1000ull;  // fence wait timeout, in nanoseconds
4568 // Copy A to B
4569 test.Submit0(test.cba, VK_NULL_HANDLE, 0U, VK_NULL_HANDLE, fence_handle);
4570 // Copy A to C
4571 test.Submit0(test.cbb);
4572 // Wait for A to B
4573 wait_result = fence.wait(kFourSeconds);
4574
4575 if (wait_result != VK_SUCCESS) {
4576 ADD_FAILURE() << "Fence wait failed. Aborting test.";
4577 m_device->wait();
return;  // abort, as the failure message above states
4578 }
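    // A successful fence wait proves the first submit (cba: A->B) retired, so re-recording that copy in cbc is safe.
    // The second submit (cbb: A->C) was never fenced, so from syncval's perspective its write to C may still be
    // pending, which is why cbd's A->C copy below is expected to report WRITE_AFTER_WRITE.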
4579
4580 // A and B should be good to go...
4581 test.Submit0(test.cbc);
4582
4583 // But C shouldn't
4584 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4585 test.Submit0(cbd);
4586 m_errorMonitor->VerifyFound();
4587
4588 test.DeviceWait();
4589}
4590
John Zulaufb66ee052022-06-10 16:52:28 -06004591TEST_F(VkSyncValTest, SyncQSBufferCopyQSORules) {
4592 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4593 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4594
4595 QSTestContext test(m_device);
4596 if (!test.Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004597 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4598 return;
4599 }
4600
John Zulauf6df2d5c2022-05-28 13:02:21 -06004601 // Command Buffer A reads from buffer A and writes to buffer B
John Zulaufb66ee052022-06-10 16:52:28 -06004602 test.BeginA();
4603 test.CopyAToB();
4604 test.End();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004605
4606 // Command Buffer B reads from buffer C and writes to buffer A, but has a barrier to protect the write to A when
4607 // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
John Zulaufb66ee052022-06-10 16:52:28 -06004608 test.BeginB();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004609
4610 // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
4611 // correct semaphore operations between queues.
John Zulaufb66ee052022-06-10 16:52:28 -06004612 test.TransferBarrier(test.buffer_a);
4613 test.CopyCToA();
4614 test.End();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004615
John Zulaufd060c3f2022-06-08 16:00:46 -06004616 // Command Buffer C does the same copy as B but without the barrier.
John Zulaufb66ee052022-06-10 16:52:28 -06004617 test.BeginC();
4618 test.CopyCToA();
4619 test.End();
John Zulaufd060c3f2022-06-08 16:00:46 -06004620
John Zulauf6df2d5c2022-05-28 13:02:21 -06004621 // Submit A and B on the same queue, to assure us the barrier *would* be sufficient given QSO
4622 // This is included in a "Success" section, just to verify CBA and CBB are set up correctly.
John Zulaufb66ee052022-06-10 16:52:28 -06004623 test.Submit0(test.cba);
4624 test.Submit0(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004625 m_device->wait(); // DeviceWaitIdle, clearing the field for the next subcase
John Zulauf6df2d5c2022-05-28 13:02:21 -06004626
4627 // Submit A and B on different queues. Since no semaphore is used between the queues, CB B hazards asynchronously with
4628 // CB A, with buffer A being read and written on independent queues.
4629 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004630 test.Submit0(test.cba);
4631 test.Submit1(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004632 m_errorMonitor->VerifyFound();
4633
4634 // Set up the semaphore for the next two cases
John Zulauf6df2d5c2022-05-28 13:02:21 -06004635
4636 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004637
4638 // Submit A and B on different queues, with an ineffectual semaphore. The wait mask is BOTTOM_OF_PIPE, thus nothing in
4639 // CB B is in the second execution scope of the waited signal.
4640 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004641 test.Submit0Signal(test.cba);
John Zulaufc55f4702022-07-15 12:16:34 -06004642 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT); // wait mask is BOTTOM, s.t. this is a wait-for-nothing.
John Zulauf6df2d5c2022-05-28 13:02:21 -06004643 m_errorMonitor->VerifyFound();
4644
4645 // The since second submit failed, it was skipped. So we can try again, without having to WaitDeviceIdle
John Zulaufb66ee052022-06-10 16:52:28 -06004646 // Include transfers in the second execution scope of the waited signal, s.t. the PipelineBarrier in CB B can chain with it.
4647 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulaufd060c3f2022-06-08 16:00:46 -06004648
4649 m_device->wait();
4650
4651 // Submit A and then C to verify the second access scope of the signal
John Zulaufb66ee052022-06-10 16:52:28 -06004652 test.Submit0Signal(test.cba);
4653 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulaufd060c3f2022-06-08 16:00:46 -06004654
4655 m_device->wait();
4656
4657 // ... and again on the same queue
John Zulaufb66ee052022-06-10 16:52:28 -06004658 test.Submit0Signal(test.cba);
4659 test.Submit0Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004660
4661 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004662}
John Zulaufb66ee052022-06-10 16:52:28 -06004663
4664TEST_F(VkSyncValTest, SyncQSBufferEvents) {
4665 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4666 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4667
4668 QSTestContext test(m_device);
4669 if (!test.Valid()) {
4670 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4671 return;
4672 }
4673
John Zulaufb66ee052022-06-10 16:52:28 -06004674 // Command Buffer A reads from buffer A and writes to buffer B
4675 test.BeginA();
4676 test.CopyAToB();
4677 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4678 test.End();
4679
4680 // Command Buffer B reads from buffer C and writes to buffer A, but has a wait to protect the write to A when
4681 // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
4682 test.BeginB();
4683
4684 // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
4685 // correct semaphore operations between queues.
4686 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4687 test.CopyCToA();
4688 test.End();
4689
4690 // Command Buffer C merges the operations from A and B, to ensure the set/wait is correct.
4691 // reads from buffer A and writes to buffer B
4692 // reads from buffer C and writes to buffer A, with the set/wait event protecting the write to A
4693 test.BeginC();
4694 test.CopyAToB();
4695 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4696 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4697 test.CopyCToA();
4698 test.End();
4699
4700 test.Submit0(test.cba);
4701 test.Submit0(test.cbb);
4702
4703 // Ensure that the wait doesn't apply to async queues
4704 m_device->wait();
4705 test.Submit0(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004706 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
4707 test.Submit1(test.cbb);
4708 m_errorMonitor->VerifyFound();
4709
4710 // Ensure that the wait doesn't apply to accesses on other synchronized queues
John Zulaufb66ee052022-06-10 16:52:28 -06004711 m_device->wait();
4712
4713 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004714 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004715 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004716 m_errorMonitor->VerifyFound();
4717
4718 // Need to have a successful signal wait to get the semaphore in a usable state.
John Zulaufb66ee052022-06-10 16:52:28 -06004719 test.BeginC();
4720 test.End();
John Zulaufc55f4702022-07-15 12:16:34 -06004721 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004722 m_device->wait();
4723
4724 // Next ensure that accesses from other queues aren't included in the first scope
4725 test.BeginA();
4726 test.CopyAToB();
4727 test.End();
4728
4729 test.BeginB();
4730 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4731 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4732 test.CopyCToA();
4733 test.End();
4734
4735 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004736 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004737 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004738 m_errorMonitor->VerifyFound();
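    // vkCmdWaitEvents only chains with work recorded before the matching vkCmdSetEvent on this queue, so cba's read
    // of buffer A (performed on queue 0) is not in the event's first scope, and the wait-for-nothing semaphore mask
    // above leaves nothing else to order the write in cbb.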
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004739
4740 m_device->wait();
John Zulaufb66ee052022-06-10 16:52:28 -06004741}
John Zulauf46f5d6b2022-06-30 12:38:34 -06004742
4743TEST_F(VkSyncValTest, SyncQSOBarrierHazard) {
4744 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4745 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4746
4747 QSTestContext test(m_device);
4748 if (!test.Valid()) {
4749 GTEST_SKIP() << "Test requires at least 2 TRANSFER capable queues in the same queue_family.";
4750 }
4751
4752 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4753 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4754 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
4755
4756 VkImageObj image_a(m_device);
4757 image_a.Init(image_ci);
4758 ASSERT_TRUE(image_a.initialized());
4759 image_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4760
4761 VkImageObj image_b(m_device);
4762 image_b.Init(image_ci);
4763 ASSERT_TRUE(image_b.initialized());
4764 image_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4765
4766 VkImageSubresourceLayers all_layers{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4767 VkOffset3D zero_offset{0, 0, 0};
4768 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
4769 VkImageCopy full_region = {all_layers, zero_offset, all_layers, zero_offset, full_extent};
4770
4771 test.BeginA();
4772 test.CopyGeneral(image_a, image_b, full_region);
4773 test.End();
4774
4775 test.BeginB();
4776 image_a.ImageMemoryBarrier(test.current_cb, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_NONE, VK_ACCESS_NONE,
John Zulaufc55f4702022-07-15 12:16:34 -06004777 VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
4778 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004779 test.End();
4780
4781 // We're going to do the copy first, then use the skip on fail, to test three different ways...
4782 test.Submit0Signal(test.cba);
4783
4784 // First asynchronously fail -- the pipeline barrier in B shouldn't work on queue 1
4785 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
4786 test.Submit1(test.cbb);
4787 m_errorMonitor->VerifyFound();
4788
4789 // Next synchronously fail -- the pipeline barrier in B shouldn't work on queue 1
4790 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004791 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004792 m_errorMonitor->VerifyFound();
4793
4794 // Then prove qso works (note that with the failure, the semaphore hasn't been waited, nor the layout changed)
John Zulaufc55f4702022-07-15 12:16:34 -06004795 test.Submit0Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004796
4797 m_device->wait();
John Zulauf46f5d6b2022-06-30 12:38:34 -06004798}
John Zulauf2f5947d2022-07-27 15:36:31 -06004799
4800TEST_F(VkSyncValTest, SyncQSRenderPass) {
4801 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4802 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4803 if (IsPlatform(kNexusPlayer)) {
4804 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
4805 return;
4806 }
4807
John Zulauf2f5947d2022-07-27 15:36:31 -06004808 CreateRenderPassHelper rp_helper(m_device);
4809 rp_helper.InitAllAttachmentsToLayoutGeneral();
4810
4811 rp_helper.InitState();
4812 rp_helper.InitAttachmentLayouts(); // Quiet any CoreChecks ImageLayout complaints
4813 m_device->wait(); // and quiesce the system
4814
4815 // The dependency protects the input attachment but not the color attachment
4816 rp_helper.subpass_dep.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
4817 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
4818 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, 0U});
4819
4820 rp_helper.InitRenderPass();
4821 rp_helper.InitFramebuffer();
4822 rp_helper.InitBeginInfo();
4823
4824 VkCommandBufferObj cb0(m_device, m_commandPool);
4825 VkCommandBufferObj cb1(m_device, m_commandPool);
4826
4827 auto do_begin_rp = [&rp_helper](VkCommandBufferObj& cb_obj) { cb_obj.BeginRenderPass(rp_helper.render_pass_begin); };
4828
4829 auto do_clear = [&rp_helper](VkCommandBufferObj& cb_obj) {
4830 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
4831 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_input->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4832 &full_subresource_range);
4833 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_color->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4834 &full_subresource_range);
4835 };
4836
4837 // Single renderpass barrier (sanity check)
4838 cb0.begin();
4839 do_clear(cb0);
John Zulauf2f5947d2022-07-27 15:36:31 -06004840 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4841 do_begin_rp(cb0);
4842 m_errorMonitor->VerifyFound();
4843 // No "end render pass" as the begin fails
4844
John Zulauf2f5947d2022-07-27 15:36:31 -06004845 cb0.end();
4846 cb0.reset();
4847
4848 // Inter CB detection (dual cb), load is safe, clear errors at submit time
4849 cb0.begin();
4850 do_clear(cb0);
4851 cb0.end();
4852
4853 cb1.begin();
4854 do_begin_rp(cb1);
4855 cb1.EndRenderPass();
4856 cb1.end();
4857
4858 auto submit2 = lvl_init_struct<VkSubmitInfo>();
4859 VkCommandBuffer two_cbs[2] = {cb0.handle(), cb1.handle()};
4860 submit2.commandBufferCount = 2;
4861 submit2.pCommandBuffers = two_cbs;
John Zulauf2f5947d2022-07-27 15:36:31 -06004862 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4863 vk::QueueSubmit(m_device->m_queue, 1, &submit2, VK_NULL_HANDLE);
4864 m_errorMonitor->VerifyFound();
4865}