/*
 * Copyright (c) 2015-2022 The Khronos Group Inc.
 * Copyright (c) 2015-2022 Valve Corporation
 * Copyright (c) 2015-2022 LunarG, Inc.
 * Copyright (c) 2015-2022 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Author: Chia-I Wu <olvaffe@gmail.com>
 * Author: Chris Forbes <chrisf@ijw.co.nz>
 * Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Mike Stroyan <mike@LunarG.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Tony Barbour <tony@LunarG.com>
 * Author: Cody Northrop <cnorthrop@google.com>
 * Author: Dave Houlton <daveh@lunarg.com>
 * Author: Jeremy Kniager <jeremyk@lunarg.com>
 * Author: Shannon McPherson <shannon@lunarg.com>
 * Author: John Zulauf <jzulauf@lunarg.com>
 */
#include <type_traits>

#include "cast_utils.h"
#include "layer_validation_tests.h"

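// Synchronization validation (syncval) hazard tests. Each test below intentionally records a hazardous
// sequence of transfer commands, registers the expected SYNC-HAZARD-* message with the error monitor,
// and then shows that an appropriately scoped barrier makes the same sequence legal.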
TEST_F(VkSyncValTest, SyncBufferCopyHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_AMD_BUFFER_MARKER_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
    bool has_amd_buffer_marker = DeviceExtensionEnabled(VK_AMD_BUFFER_MARKER_EXTENSION_NAME);

    VkBufferObj buffer_a;
    VkBufferObj buffer_b;
    VkBufferObj buffer_c;
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);

    VkBufferCopy region = {0, 0, 256};
    VkBufferCopy front2front = {0, 0, 128};
    VkBufferCopy front2back = {0, 128, 128};
    VkBufferCopy back2back = {128, 128, 128};

    auto cb = m_commandBuffer->handle();
    m_commandBuffer->begin();

    vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);

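    // The copy above read buffer_a as its source; writing buffer_a below without an intervening barrier
    // is a write-after-read (WAR) hazard.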
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
    buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    buffer_barrier.buffer = buffer_a.handle();
    buffer_barrier.offset = 0;
    buffer_barrier.size = 256;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
                           nullptr);

    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);

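    // front2back writes bytes [128, 256) of buffer_a, which back2back just wrote, so this unsynchronized
    // overlap is a write-after-write (WAW) hazard; the non-overlapping copies above were fine.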
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);

    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);

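    // The barrier below only places transfer reads in its source access scope (srcAccessMask = READ), so the
    // write to buffer_b above is not guarded; reading buffer_b as a copy source is a read-after-write (RAW) hazard.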
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protect C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    m_commandBuffer->end();

    // CmdFillBuffer
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
    m_commandBuffer->end();

    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdFillBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, 256, 1);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // CmdUpdateBuffer
    int i = 10;
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
    m_commandBuffer->end();

    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdUpdateBuffer(m_commandBuffer->handle(), buffer_a.handle(), 0, sizeof(i), &i);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // Create secondary buffers to use
    VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb1 = secondary_cb1.handle();
    secondary_cb1.begin();
    vk::CmdCopyBuffer(scb1, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    secondary_cb1.end();

    VkCommandBufferObj secondary_cb2(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb2 = secondary_cb2.handle();
    secondary_cb2.begin();
    vk::CmdCopyBuffer(scb2, buffer_a.handle(), buffer_c.handle(), 1, &front2front);
    secondary_cb2.end();

    VkCommandBufferObj secondary_cb3(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb3 = secondary_cb3.handle();
    secondary_cb3.begin();
    secondary_cb3.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 0,
                                  nullptr);
    secondary_cb3.end();

    VkCommandBufferObj secondary_cb4(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb4 = secondary_cb4.handle();
    secondary_cb4.begin();
    vk::CmdCopyBuffer(scb4, buffer_b.handle(), buffer_c.handle(), 1, &front2front);
    secondary_cb4.end();

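    // In summary: scb1 reads buffer_c and writes buffer_a, scb2 reads buffer_a and writes buffer_c,
    // scb3 records only a global transfer->transfer barrier, and scb4 reads buffer_b and writes buffer_c.
    // The vkCmdExecuteCommands cases below combine them to trigger (or, with scb3, resolve) hazards.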
    // One secondary CB hazard with active command buffer
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdExecuteCommands(cb, 1, &scb1);
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // Two secondary CB hazard with each other
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    // A "SYNC-HAZARD-WRITE_AFTER_WRITE" is also present, but only the first hazard is reported.
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    {
        VkCommandBuffer two_cbs[2] = {scb1, scb2};
        vk::CmdExecuteCommands(cb, 2, two_cbs);
    }
    m_errorMonitor->VerifyFound();
    m_commandBuffer->end();

    // Two secondary CB hazard with each other
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    {
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
        VkCommandBuffer two_cbs[2] = {scb1, scb4};
        vk::CmdExecuteCommands(cb, 2, two_cbs);
        m_errorMonitor->VerifyFound();
    }
    m_commandBuffer->end();

    // Add a secondary CB with a barrier
    m_commandBuffer->reset();
    m_commandBuffer->begin();
    {
        VkCommandBuffer three_cbs[3] = {scb1, scb3, scb4};
        vk::CmdExecuteCommands(cb, 3, three_cbs);
    }
    m_commandBuffer->end();

    m_commandBuffer->reset();
    // CmdWriteBufferMarkerAMD
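    // vkCmdWriteBufferMarkerAMD writes a 32-bit marker to the buffer at the requested pipeline stage (TRANSFER
    // here), so syncval tracks it against the copy writes just like any other transfer write.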
    if (has_amd_buffer_marker) {
        auto fpCmdWriteBufferMarkerAMD =
            (PFN_vkCmdWriteBufferMarkerAMD)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteBufferMarkerAMD");
        if (!fpCmdWriteBufferMarkerAMD) {
            printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
        } else {
            m_commandBuffer->reset();
            m_commandBuffer->begin();
            fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
            m_commandBuffer->end();

            m_commandBuffer->reset();
            m_commandBuffer->begin();
            vk::CmdCopyBuffer(cb, buffer_b.handle(), buffer_a.handle(), 1, &region);
            m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
            fpCmdWriteBufferMarkerAMD(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, buffer_a.handle(), 0, 1);
            m_errorMonitor->VerifyFound();
            m_commandBuffer->end();
        }
    } else {
        printf("%s Test requires unsupported vkCmdWriteBufferMarkerAMD feature. Skipped.\n", kSkipPrefix);
    }
}

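// The Sync2* tests below repeat the same hazard scenarios through VK_KHR_synchronization2, where barriers
// are expressed with VkDependencyInfoKHR and recorded via vkCmdPipelineBarrier2KHR.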
TEST_F(VkSyncValTest, Sync2BufferCopyHazards) {
    SetTargetApiVersion(VK_API_VERSION_1_2);
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
    } else {
        printf("%s Synchronization2 not supported, skipping test\n", kSkipPrefix);
        return;
    }

    if (!CheckSynchronization2SupportAndInitState(this)) {
        printf("%s Synchronization2 not supported, skipping test\n", kSkipPrefix);
        return;
    }
    auto fpCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");

    VkBufferObj buffer_a;
    VkBufferObj buffer_b;
    VkBufferObj buffer_c;
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
    buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);

    VkBufferCopy region = {0, 0, 256};
    VkBufferCopy front2front = {0, 0, 128};
    VkBufferCopy front2back = {0, 128, 128};
    VkBufferCopy back2back = {128, 128, 128};

    auto cb = m_commandBuffer->handle();
    m_commandBuffer->begin();

    vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

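    // With synchronization2 the pipeline stage masks move out of vkCmdPipelineBarrier's parameters and into
    // the barrier structures themselves (srcStageMask/dstStageMask), bundled in a VkDependencyInfoKHR.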
    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto buffer_barrier = lvl_init_struct<VkBufferMemoryBarrier2KHR>();
        buffer_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        buffer_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        buffer_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
        buffer_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        buffer_barrier.buffer = buffer_a.handle();
        buffer_barrier.offset = 0;
        buffer_barrier.size = 256;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.bufferMemoryBarrierCount = 1;
        dep_info.pBufferMemoryBarriers = &buffer_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
    }

    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &back2back);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
        mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.memoryBarrierCount = 1;
        dep_info.pMemoryBarriers = &mem_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);

        vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region);

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;  // Protect C but not B
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region);
        m_errorMonitor->VerifyFound();

        m_commandBuffer->end();
    }
}

TEST_F(VkSyncValTest, SyncCopyOptimalImageHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image_a.Init(image_ci);
    ASSERT_TRUE(image_a.initialized());

    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    ASSERT_TRUE(image_b.initialized());

    VkImageObj image_c(m_device);
    image_c.Init(image_ci);
    ASSERT_TRUE(image_c.initialized());

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
    VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
    VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
    VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

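    // As in the buffer test: image_a was just read as the copy source, so writing it below without a
    // barrier is a write-after-read (WAR) hazard across both array layers.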
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->VerifyFound();

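    // region_0_back touches a different quadrant of layer 0 than region_0_front, and nothing has written it
    // since the WAW barrier above, so no hazard is expected here.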
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);

    m_commandBuffer->end();

    // Test secondary command buffers
    // Create secondary buffers to use
    VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
    VkCommandBuffer scb1 = secondary_cb1.handle();
    secondary_cb1.begin();
    vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    secondary_cb1.end();

    auto record_primary = [&]() {
        m_commandBuffer->reset();
        m_commandBuffer->begin();
        vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
        vk::CmdExecuteCommands(cb, 1, &scb1);
        m_commandBuffer->end();
    };

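    // The primary copy a->b reads image_a, then the executed secondary writes image_a (c->a), so recording
    // the primary surfaces a WAR hazard at vkCmdExecuteCommands time unless the secondary also records a
    // suitable barrier.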
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    record_primary();
    m_errorMonitor->VerifyFound();

    // With a barrier...
    secondary_cb1.reset();
    secondary_cb1.begin();
    vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(scb1, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    secondary_cb1.end();
    record_primary();

    auto image_transition_barrier = image_barrier;
    image_transition_barrier.image = image_a.handle();
    image_transition_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_transition_barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;

    secondary_cb1.reset();
    secondary_cb1.begin();
    // Use the wrong stage, get an error
    vk::CmdPipelineBarrier(scb1, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_transition_barrier);
    secondary_cb1.end();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    record_primary();
    m_errorMonitor->VerifyFound();

    // CmdResolveImage hazard testing
    VkImageFormatProperties formProps = {{0, 0, 0}, 0, 0, 0, 0};
    vk::GetPhysicalDeviceImageFormatProperties(m_device->phy().handle(), VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D,
                                               VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, 0, &formProps);

    if (!(formProps.sampleCounts & VK_SAMPLE_COUNT_2_BIT)) {
        printf("%s CmdResolveImage Test requires unsupported VK_SAMPLE_COUNT_2_BIT feature. Skipped.\n", kSkipPrefix);
    } else {
        VkImageObj image_s2_a(m_device), image_s2_b(m_device);
        image_ci.samples = VK_SAMPLE_COUNT_2_BIT;
        image_s2_a.Init(image_ci);
        ASSERT_TRUE(image_s2_a.initialized());

        image_s2_b.Init(image_ci);
        ASSERT_TRUE(image_s2_b.initialized());

        VkImageResolve r_full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};

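        // vkCmdResolveImage reads the multisampled source and writes the single-sample destination, so it is
        // subject to the same read/write hazard tracking as the copies above.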
        m_commandBuffer->reset();
        m_commandBuffer->begin();
        image_s2_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
        image_s2_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
        vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_commandBuffer->end();

        m_commandBuffer->reset();
        m_commandBuffer->begin();
        vk::CmdCopyImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                         &full_region);
        vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
        vk::CmdResolveImage(cb, image_s2_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_errorMonitor->VerifyFound();

        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
        vk::CmdResolveImage(cb, image_s2_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                            &r_full_region);
        m_errorMonitor->VerifyFound();
        m_commandBuffer->end();
    }
}

TEST_F(VkSyncValTest, Sync2CopyOptimalImageHazards) {
    SetTargetApiVersion(VK_API_VERSION_1_2);
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
    } else {
        printf("%s Synchronization2 not supported, skipping test\n", kSkipPrefix);
        return;
    }

    if (!CheckSynchronization2SupportAndInitState(this)) {
        printf("%s Synchronization2 not supported, skipping test\n", kSkipPrefix);
        return;
    }
    auto fpCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image_a.Init(image_ci);
    ASSERT_TRUE(image_a.initialized());

    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    ASSERT_TRUE(image_b.initialized());

    VkImageObj image_c(m_device);
    image_c.Init(image_ci);
    ASSERT_TRUE(image_c.initialized());

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
    VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
    VkImageCopy region_0_to_1 = {layers_0, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
    VkImageCopy region_0_front = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
    VkImageCopy region_0_back = {layers_0, half_offset, layers_0, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto image_barrier = lvl_init_struct<VkImageMemoryBarrier2KHR>();
        image_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        image_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        image_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;
        image_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        image_barrier.image = image_a.handle();
        image_barrier.subresourceRange = full_subresource_range;
        image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
        image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.imageMemoryBarrierCount = 1;
        dep_info.pImageMemoryBarriers = &image_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
    }

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_1_to_1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_to_1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    {
        auto mem_barrier = lvl_init_struct<VkMemoryBarrier2KHR>();
        mem_barrier.srcStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
        mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        auto dep_info = lvl_init_struct<VkDependencyInfoKHR>();
        dep_info.memoryBarrierCount = 1;
        dep_info.pMemoryBarriers = &mem_barrier;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

        // Use barrier to protect last reader, but not last writer...
        m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
        mem_barrier.srcAccessMask = VK_ACCESS_2_TRANSFER_READ_BIT_KHR;  // Protects C but not B
        mem_barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
        fpCmdPipelineBarrier2KHR(cb, &dep_info);
        vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
        m_errorMonitor->VerifyFound();
    }

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_0_back);

    m_commandBuffer->end();
}

TEST_F(VkSyncValTest, SyncCopyOptimalMultiPlanarHazards) {
    // TODO: Add code to enable sync validation
    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    // Verify format
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
                                                     VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
    if (!supported) {
        printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 2};
    VkImageSubresourceLayers layer0_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer0_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer1_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 1, 1};
    VkImageSubresourceRange full_subresource_range{
        VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 2};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D one_four_offset{32, 32, 0};
    VkExtent3D full_extent{128, 128, 1};    // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};      // <-- image type is 2D
    VkExtent3D one_four_extent{32, 32, 1};  // <-- image type is 2D

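    // For VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, the chroma planes (plane 1 and plane 2) are half the width and
    // height of plane 0, which is why the copies into plane 1 below use half_extent and one_four_extent.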
    VkImageCopy region_all_plane0_to_all_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
    VkImageCopy region_layer0_plane0_to_layer0_plane0 = {layer0_plane0, zero_offset, layer0_plane0, zero_offset, full_extent};
    VkImageCopy region_layer0_plane0_to_layer0_plane1 = {layer0_plane0, zero_offset, layer0_plane1, zero_offset, half_extent};
    VkImageCopy region_layer1_plane1_to_layer1_plane1_front = {layer1_plane1, zero_offset, layer1_plane1, zero_offset,
                                                               one_four_extent};
    VkImageCopy region_layer1_plane1_to_layer1_plane1_back = {layer1_plane1, one_four_offset, layer1_plane1, one_four_offset,
                                                              one_four_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer0_plane0_to_layer0_plane1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_all_plane0_to_all_plane0);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_layer1_plane1_to_layer1_plane1_back);

    m_commandBuffer->end();
}

TEST_F(VkSyncValTest, SyncCopyLinearImageHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{64, 64, 0};
    VkExtent3D full_extent{128, 128, 1};  // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};    // <-- image type is 2D

    VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
    VkImageCopy region_front = {layers_all, zero_offset, layers_all, zero_offset, half_extent};
    VkImageCopy region_back = {layers_all, half_offset, layers_all, half_offset, half_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_b.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_back);
}

TEST_F(VkSyncValTest, SyncCopyLinearMultiPlanarHazards) {
    // TODO: Add code to enable sync validation
    // Enable KHR multiplane req'd extensions
    bool mp_extensions = InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
                                                    VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION);
    if (mp_extensions) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
    mp_extensions = mp_extensions && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    if (mp_extensions) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_1_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_BIND_MEMORY_2_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
    } else {
        printf("%s test requires KHR multiplane extensions, not available. Skipping.\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitState());

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
    VkImageObj image_a(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_LINEAR);
    // Verify format
    bool supported = ImageFormatAndFeaturesSupported(instance(), gpu(), image_ci,
                                                     VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT);
    if (!supported) {
        printf("%s Multiplane image format not supported. Skipping test.\n", kSkipPrefix);
        return;  // Assume there's low ROI on searching for different mp formats
    }

    image_a.Init(image_ci);
    VkImageObj image_b(m_device);
    image_b.Init(image_ci);
    VkImageObj image_c(m_device);
    image_c.Init(image_ci);

    VkImageSubresourceLayers layer_all_plane0{VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, 0, 0, 1};
    VkImageSubresourceLayers layer_all_plane1{VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, 0, 0, 1};
    VkImageSubresourceRange full_subresource_range{
        VK_IMAGE_ASPECT_PLANE_0_BIT_KHR | VK_IMAGE_ASPECT_PLANE_1_BIT_KHR | VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, 0, 1, 0, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D one_four_offset{32, 32, 0};
    VkExtent3D full_extent{128, 128, 1};    // <-- image type is 2D
    VkExtent3D half_extent{64, 64, 1};      // <-- image type is 2D
    VkExtent3D one_four_extent{32, 32, 1};  // <-- image type is 2D

    VkImageCopy region_plane0_to_plane0 = {layer_all_plane0, zero_offset, layer_all_plane0, zero_offset, full_extent};
    VkImageCopy region_plane0_to_plane1 = {layer_all_plane0, zero_offset, layer_all_plane1, zero_offset, half_extent};
    VkImageCopy region_plane1_to_plane1_front = {layer_all_plane1, zero_offset, layer_all_plane1, zero_offset, one_four_extent};
    VkImageCopy region_plane1_to_plane1_back = {layer_all_plane1, one_four_offset, layer_all_plane1, one_four_offset,
                                                one_four_extent};

    m_commandBuffer->begin();

    image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
    image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);

    auto cb = m_commandBuffer->handle();

    vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    // Use the barrier to clean up the WAR, and try again. (and show that validation is accounting for the barrier effect too.)
    auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
    image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    image_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    image_barrier.image = image_a.handle();
    image_barrier.subresourceRange = full_subresource_range;
    image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 0, nullptr, 1,
                           &image_barrier);

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane1);

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane1);
    m_errorMonitor->VerifyFound();

    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    // NOTE: Since the previous command skips in validation, the state update is never done, and the validation layer thus doesn't
    // record the write operation to b. So we'll need to repeat it successfully to set up for the *next* test.

    // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
    auto mem_barrier = LvlInitStruct<VkMemoryBarrier>();
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);

    // Use barrier to protect last reader, but not last writer...
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
    mem_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;  // Protects C but not B
    mem_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
    vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &mem_barrier, 0, nullptr, 0,
                           nullptr);
    vk::CmdCopyImage(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane0_to_plane0);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_front);
    m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_front);
    m_errorMonitor->VerifyFound();

    vk::CmdCopyImage(cb, image_c.handle(), VK_IMAGE_LAYOUT_GENERAL, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
                     &region_plane1_to_plane1_back);

    m_commandBuffer->end();
}

TEST_F(VkSyncValTest, SyncCopyBufferImageHazards) {
    ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
    ASSERT_NO_FATAL_FAILURE(InitState());

    VkBufferObj buffer_a, buffer_b;
    VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    buffer_a.init_as_src_and_dst(*m_device, 2048, mem_prop);
    buffer_b.init_as_src_and_dst(*m_device, 2048, mem_prop);

    VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
    VkImageObj image_a(m_device), image_b(m_device);
    const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
    image_a.Init(image_ci);
    image_b.Init(image_ci);

    VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
    VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
    VkOffset3D zero_offset{0, 0, 0};
    VkOffset3D half_offset{16, 16, 0};
    VkExtent3D half_extent{16, 16, 1};  // <-- image type is 2D

1001 VkBufferImageCopy region_buffer_front_image_0_front = {0, 16, 16, layers_0, zero_offset, half_extent};
1002 VkBufferImageCopy region_buffer_front_image_1_front = {0, 16, 16, layers_1, zero_offset, half_extent};
1003 VkBufferImageCopy region_buffer_front_image_1_back = {0, 16, 16, layers_1, half_offset, half_extent};
1004 VkBufferImageCopy region_buffer_back_image_0_front = {1024, 16, 16, layers_0, zero_offset, half_extent};
1005 VkBufferImageCopy region_buffer_back_image_0_back = {1024, 16, 16, layers_0, half_offset, half_extent};
1006 VkBufferImageCopy region_buffer_back_image_1_front = {1024, 16, 16, layers_1, zero_offset, half_extent};
1007 VkBufferImageCopy region_buffer_back_image_1_back = {1024, 16, 16, layers_1, half_offset, half_extent};
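    // The regions split the buffer into front (offset 0) and back (offset 1024) halves, and the 32x32, 2-layer image into
    // layer 0/1 with front and back 16x16 quadrants, so hazards can be checked per disjoint region.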
1008
1009 m_commandBuffer->begin();
1010 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1011 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1012
1013 auto cb = m_commandBuffer->handle();
1014 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1015 &region_buffer_front_image_0_front);
1016
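    // Re-running the identical copy writes the same image region again with no barrier: WRITE_AFTER_WRITE.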
1017 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1018 vk::CmdCopyBufferToImage(cb, buffer_a.handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1019 &region_buffer_front_image_0_front);
1020 m_errorMonitor->VerifyFound();
1021
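    // Copying back over the same region flips both accesses: the image (just written) is now read (READ_AFTER_WRITE) and the
    // buffer front (just read) is now written (WRITE_AFTER_READ).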
1022 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1023 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1024 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1025 &region_buffer_front_image_0_front);
1026 m_errorMonitor->VerifyFound();
1027
1028 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1029 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1030 &region_buffer_back_image_0_front);
1031 m_errorMonitor->VerifyFound();
1032
1033 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1034 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1035 &region_buffer_front_image_1_front);
1036 m_errorMonitor->VerifyFound();
1037
1038 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1039 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1040 &region_buffer_front_image_1_back);
1041 m_errorMonitor->VerifyFound();
1042
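    // The back half of the buffer and the back quadrant of layer 0 have no recorded accesses yet, so this copy is clean.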
Jeremy Gebben170781d2020-11-19 16:21:21 -07001043 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001044
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001045 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001046 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1047 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1048 buffer_barrier.buffer = buffer_a.handle();
1049 buffer_barrier.offset = 1024;
1050 buffer_barrier.size = 2048;
1051 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1052 nullptr);
1053
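    // The WAW buffer barrier covers the back half of buffer_a (offset 1024), so the next image-to-buffer copy can write
    // those bytes safely.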
Jeremy Gebben170781d2020-11-19 16:21:21 -07001054 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1,
1055 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001056
1057 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1058 nullptr);
1059
Jeremy Gebben170781d2020-11-19 16:21:21 -07001060 vk::CmdCopyImageToBuffer(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_a.handle(), 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001061
1062 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1063 &region_buffer_front_image_0_front);
1064
1065 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1066 vk::CmdCopyImageToBuffer(cb, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, buffer_b.handle(), 1,
1067 &region_buffer_front_image_0_front);
1068 m_errorMonitor->VerifyFound();
1069
1070 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1071 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1072 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1073 &region_buffer_front_image_0_front);
1074 m_errorMonitor->VerifyFound();
1075
1076 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1077 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1078 &region_buffer_back_image_0_front);
1079 m_errorMonitor->VerifyFound();
1080
1081 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1082 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1083 &region_buffer_front_image_1_front);
1084 m_errorMonitor->VerifyFound();
1085
1086 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1087 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1088 &region_buffer_front_image_1_back);
1089 m_errorMonitor->VerifyFound();
1090
Jeremy Gebben170781d2020-11-19 16:21:21 -07001091 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_0_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001092
1093 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1094 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
1095 buffer_barrier.buffer = buffer_b.handle();
1096 buffer_barrier.offset = 1024;
1097 buffer_barrier.size = 2048;
1098 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1099 nullptr);
1100
Jeremy Gebben170781d2020-11-19 16:21:21 -07001101 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1102 &region_buffer_back_image_1_front);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001103
1104 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1, &buffer_barrier, 0,
1105 nullptr);
1106
Jeremy Gebben170781d2020-11-19 16:21:21 -07001107 vk::CmdCopyBufferToImage(cb, buffer_b.handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_buffer_back_image_1_back);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001108
1109 m_commandBuffer->end();
1110}
1111
1112TEST_F(VkSyncValTest, SyncBlitImageHazards) {
1113 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1114 ASSERT_NO_FATAL_FAILURE(InitState());
1115
1116 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1117 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1118 VkImageObj image_a(m_device), image_b(m_device);
1119 const auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
1120 image_a.Init(image_ci);
1121 image_b.Init(image_ci);
1122
1123 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1124 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
1125 VkOffset3D zero_offset{0, 0, 0};
1126 VkOffset3D half_0_offset{16, 16, 0};
1127 VkOffset3D half_1_offset{16, 16, 1};
1128 VkOffset3D full_offset{32, 32, 1};
1129 VkImageBlit region_0_front_1_front = {layers_0, {zero_offset, half_1_offset}, layers_1, {zero_offset, half_1_offset}};
1130 VkImageBlit region_1_front_0_front = {layers_1, {zero_offset, half_1_offset}, layers_0, {zero_offset, half_1_offset}};
1131 VkImageBlit region_1_back_0_back = {layers_1, {half_0_offset, full_offset}, layers_0, {half_0_offset, full_offset}};
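    // Each blit maps the front (or back) half of one array layer onto the same half of the other layer.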
1132
1133 m_commandBuffer->begin();
1134 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1135 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1136
1137 auto cb = m_commandBuffer->handle();
1138
1139 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1140 &region_0_front_1_front, VK_FILTER_NEAREST);
1141
1142 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1143 vk::CmdBlitImage(cb, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1144 &region_0_front_1_front, VK_FILTER_NEAREST);
1145 m_errorMonitor->VerifyFound();
1146
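    // Reversing the blit reads layer 1 (just written: READ_AFTER_WRITE) and writes layer 0 (just read: WRITE_AFTER_READ).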
1147 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1148 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1149 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1150 &region_1_front_0_front, VK_FILTER_NEAREST);
1151 m_errorMonitor->VerifyFound();
1152
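    // The back halves of both layers are untouched, so blitting them is hazard-free.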
Jeremy Gebben170781d2020-11-19 16:21:21 -07001153 vk::CmdBlitImage(cb, image_b.image(), VK_IMAGE_LAYOUT_GENERAL, image_a.image(), VK_IMAGE_LAYOUT_GENERAL, 1,
1154 &region_1_back_0_back, VK_FILTER_NEAREST);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001155
1156 m_commandBuffer->end();
1157}
1158
1159TEST_F(VkSyncValTest, SyncRenderPassBeginTransitionHazard) {
1160 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1161 ASSERT_NO_FATAL_FAILURE(InitState());
John Zulaufbb373682021-10-05 17:21:40 -06001162 const VkSubpassDependency external_subpass_dependency = {VK_SUBPASS_EXTERNAL,
1163 0,
1164 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1165 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
1166 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1167 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
1168 VK_DEPENDENCY_BY_REGION_BIT};
1169 m_additionalSubpassDependencies.push_back(external_subpass_dependency);
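    // Add an explicit external dependency (color-attachment write -> write) that pre-render-pass barriers can chain with.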
Jeremy Gebben170781d2020-11-19 16:21:21 -07001170 ASSERT_NO_FATAL_FAILURE(InitRenderTarget(2));
1171
1172 // Render Target Information
1173 auto width = static_cast<uint32_t>(m_width);
1174 auto height = static_cast<uint32_t>(m_height);
1175 auto *rt_0 = m_renderTargets[0].get();
1176 auto *rt_1 = m_renderTargets[1].get();
1177
1178 // Other buffers with which to interact
1179 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1180 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1181 VkImageObj image_a(m_device), image_b(m_device);
1182 const auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1183 image_a.Init(image_ci);
1184 image_b.Init(image_ci);
1185
1186 VkOffset3D zero_offset{0, 0, 0};
1187 VkExtent3D full_extent{width, height, 1}; // <-- image type is 2D
1188 VkImageSubresourceLayers layer_color{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1189 VkImageCopy region_to_copy = {layer_color, zero_offset, layer_color, zero_offset, full_extent};
1190
1191 auto cb = m_commandBuffer->handle();
1192
Jeremy Gebben170781d2020-11-19 16:21:21 -07001193 m_commandBuffer->begin();
1194 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1195 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1196 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1197 rt_1->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1198
1199 rt_0->SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1200 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, rt_0->handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001201
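    // rt_0 was just written by the copy; the begin-time layout transition writes it again, so expect WRITE_AFTER_WRITE.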
1202 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1203     m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);  // This fails, so the driver call is skipped and no EndRenderPass is needed
1204 m_errorMonitor->VerifyFound();
1205
Jeremy Gebben170781d2020-11-19 16:21:21 -07001206 // Use the barrier to clean up the WAW, and try again. (and show that validation is accounting for the barrier effect too.)
1207 VkImageSubresourceRange rt_full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001208 auto image_barrier = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001209 image_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
John Zulaufbb373682021-10-05 17:21:40 -06001210 image_barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001211 image_barrier.image = rt_0->handle();
1212 image_barrier.subresourceRange = rt_full_subresource_range;
1213 image_barrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
1214 image_barrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
John Zulaufbb373682021-10-05 17:21:40 -06001215 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0,
1216 nullptr, 1, &image_barrier);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001217 vk::CmdCopyImage(cb, rt_1->handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region_to_copy);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001218
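    // The image barrier above only covers rt_0; rt_1 was just read by the copy, so its begin-time transition is a
    // WRITE_AFTER_READ hazard.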
1219 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1220     m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);  // This fails, so the driver call is skipped and no EndRenderPass is needed
1221 m_errorMonitor->VerifyFound();
1222
Jeremy Gebben170781d2020-11-19 16:21:21 -07001223    // A global execution barrier that the implicit external dependency can chain with should work...
1224 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 0,
1225 nullptr);
1226
1227     // With the barrier above, the layout transition has a chained execution sync operation, and the default
1228     // implicit VkSubpassDependency makes the load-op clear safe against the layout transition...
1229 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1230 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001231}
1232
1233TEST_F(VkSyncValTest, SyncCmdDispatchDrawHazards) {
1234 // TODO: Add code to enable sync validation
1235 SetTargetApiVersion(VK_API_VERSION_1_2);
1236
1237 // Enable VK_KHR_draw_indirect_count for KHR variants
1238 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001239 VkPhysicalDeviceVulkan12Features features12 = LvlInitStruct<VkPhysicalDeviceVulkan12Features>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001240 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME)) {
1241 m_device_extension_names.push_back(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1242 if (DeviceValidationVersion() >= VK_API_VERSION_1_2) {
1243 features12.drawIndirectCount = VK_TRUE;
1244 }
1245 }
1246 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features12, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1247 bool has_khr_indirect = DeviceExtensionEnabled(VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME);
1248 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
1249
1250 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
1251 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1252 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1253 VkImageObj image_c_a(m_device), image_c_b(m_device);
1254 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
1255 image_c_a.Init(image_c_ci);
1256 image_c_b.Init(image_c_ci);
1257
1258 VkImageView imageview_c = image_c_a.targetView(format);
1259 VkImageUsageFlags image_usage_storage =
1260 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1261 VkImageObj image_s_a(m_device), image_s_b(m_device);
1262 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
1263 image_s_a.Init(image_s_ci);
1264 image_s_b.Init(image_s_ci);
1265 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1266 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1267
1268 VkImageView imageview_s = image_s_a.targetView(format);
1269
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001270 vk_testing::Sampler sampler_s, sampler_c;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001271 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001272 sampler_s.init(*m_device, sampler_ci);
1273 sampler_c.init(*m_device, sampler_ci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001274
1275 VkBufferObj buffer_a, buffer_b;
1276 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1277 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
1278 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1279 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
1280 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
1281
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001282 vk_testing::BufferView bufferview;
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001283 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001284 bvci.buffer = buffer_a.handle();
1285 bvci.format = VK_FORMAT_R32_SFLOAT;
1286 bvci.offset = 0;
1287 bvci.range = VK_WHOLE_SIZE;
1288
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001289 bufferview.init(*m_device, bvci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001290
1291 OneOffDescriptorSet descriptor_set(m_device,
1292 {
1293 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1294 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
1295 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
1296 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
1297 });
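    // One binding of each descriptor type the shader reads: uniform buffer, combined image sampler, storage image, and
    // storage texel buffer.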
1298
sfricke-samsung36428462021-02-10 01:23:34 -08001299 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001300 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
Jeremy Gebben170781d2020-11-19 16:21:21 -07001301 VK_IMAGE_LAYOUT_GENERAL);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001302 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_IMAGE_LAYOUT_GENERAL);
1303 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001304 descriptor_set.UpdateDescriptorSets();
1305
1306 // Dispatch
sjfricke394227a2022-06-20 16:47:38 +09001307 const char *csSource = R"glsl(
sfricke-samsung1c0b96a2021-07-08 22:24:09 -07001308 #version 450
1309 layout(set=0, binding=0) uniform foo { float x; } ub0;
1310 layout(set=0, binding=1) uniform sampler2D cis1;
1311 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
1312 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
1313 void main(){
1314 vec4 vColor4;
1315 vColor4.x = ub0.x;
1316 vColor4 = texture(cis1, vec2(0));
1317 vColor4 = imageLoad(si2, ivec2(0));
1318 vColor4 = imageLoad(stb3, 0);
1319 }
1320 )glsl";
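    // Every binding access in this shader is a read, so dispatches hazard with prior transfer writes (READ_AFTER_WRITE)
    // and later transfer writes hazard with the dispatch (WRITE_AFTER_READ).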
Jeremy Gebben170781d2020-11-19 16:21:21 -07001321
John Zulaufbe8562b2020-12-15 14:21:01 -07001322 VkEventObj event;
1323 event.init(*m_device, VkEventObj::create_info(0));
1324 VkEvent event_handle = event.handle();
1325
Jeremy Gebben170781d2020-11-19 16:21:21 -07001326 CreateComputePipelineHelper pipe(*this);
1327 pipe.InitInfo();
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001328 pipe.cs_.reset(new VkShaderObj(this, csSource, VK_SHADER_STAGE_COMPUTE_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001329 pipe.InitState();
1330 pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1331 pipe.CreateComputePipeline();
1332
1333 m_commandBuffer->begin();
1334
1335 VkBufferCopy buffer_region = {0, 0, 2048};
1336 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1337
1338 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1339 VkOffset3D zero_offset{0, 0, 0};
1340 VkExtent3D full_extent{16, 16, 1};
1341 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
1342 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1343 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1344 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
1345 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1346
1347 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1348 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1349 &descriptor_set.set_, 0, nullptr);
1350
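    // buffer_a, image_c_a, and image_s_a were all just written by the copies above; the dispatch reads each bound resource
    // with no barrier, so expect one READ_AFTER_WRITE per binding.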
1351 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1352 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1353 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1354 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1355 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1356 m_errorMonitor->VerifyFound();
1357
1358 m_commandBuffer->end();
1359 m_commandBuffer->reset();
1360 m_commandBuffer->begin();
1361
1362 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1363 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1364 &descriptor_set.set_, 0, nullptr);
1365 vk::CmdDispatch(m_commandBuffer->handle(), 1, 1, 1);
1366
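    // With the dispatch recorded first, the same transfer copies now overwrite resources the shader just read: WRITE_AFTER_READ.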
1367 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1368 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_b.handle(), buffer_a.handle(), 1, &buffer_region);
1369 m_errorMonitor->VerifyFound();
1370
1371 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1372 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
1373 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
1374 m_errorMonitor->VerifyFound();
1375
1376 m_commandBuffer->end();
1377 m_commandBuffer->reset();
1378
1379 // DispatchIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001380 VkBufferObj buffer_dispatchIndirect, buffer_dispatchIndirect2;
1381 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1382 buffer_dispatchIndirect.init(
1383 *m_device, buffer_dispatchIndirect.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1384 buffer_dispatchIndirect2.init(
1385 *m_device, buffer_dispatchIndirect2.create_info(sizeof(VkDispatchIndirectCommand), buffer_usage, nullptr), mem_prop);
1386 m_commandBuffer->begin();
1387 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1388 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1389 &descriptor_set.set_, 0, nullptr);
1390 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1391 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001392
1393 m_commandBuffer->reset();
1394 m_commandBuffer->begin();
1395
1396 buffer_region = {0, 0, sizeof(VkDispatchIndirectCommand)};
1397 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_dispatchIndirect2.handle(), buffer_dispatchIndirect.handle(), 1,
1398 &buffer_region);
1399 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_);
1400 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_COMPUTE, pipe.pipeline_layout_.handle(), 0, 1,
1401 &descriptor_set.set_, 0, nullptr);
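    // The indirect parameter buffer was just filled by the copy; CmdDispatchIndirect reads it, so this is READ_AFTER_WRITE.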
1402 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1403 vk::CmdDispatchIndirect(m_commandBuffer->handle(), buffer_dispatchIndirect.handle(), 0);
1404 m_errorMonitor->VerifyFound();
1405 m_commandBuffer->end();
1406
1407 // Draw
Jeremy Gebben170781d2020-11-19 16:21:21 -07001408 const float vbo_data[3] = {1.f, 0.f, 1.f};
1409 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
1410 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
1411 VkBufferObj vbo, vbo2;
1412 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1413 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1414 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
1415
sfricke-samsungae54c1e2022-01-21 05:35:21 -08001416 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
1417 VkShaderObj fs(this, csSource, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001418
1419 CreatePipelineHelper g_pipe(*this);
1420 g_pipe.InitInfo();
1421 g_pipe.InitState();
1422 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
1423 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
1424 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
1425 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
1426 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
1427 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
1428 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
1429
1430 m_commandBuffer->reset();
1431 m_commandBuffer->begin();
1432 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1433 VkDeviceSize offset = 0;
1434 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1435
1436 VkViewport viewport = {0, 0, 16, 16, 0, 1};
1437 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1438 VkRect2D scissor = {{0, 0}, {16, 16}};
1439 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1440
1441 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1442 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1443 &descriptor_set.set_, 0, nullptr);
1444 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1445 m_commandBuffer->EndRenderPass();
1446 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001447
1448 m_commandBuffer->reset();
1449 m_commandBuffer->begin();
1450
1451 buffer_region = {0, 0, sizeof(vbo_data)};
1452 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1453
1454 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1455 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1456 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1457 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1458 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1459 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1460 &descriptor_set.set_, 0, nullptr);
1461
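    // vbo was overwritten by the copy at the top of this command buffer; the draw's vertex fetch is READ_AFTER_WRITE.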
1462 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1463 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1464 m_errorMonitor->VerifyFound();
1465
1466 m_commandBuffer->EndRenderPass();
1467 m_commandBuffer->end();
1468
John Zulaufbe8562b2020-12-15 14:21:01 -07001469 // Repeat the draw test with a WaitEvent to protect it.
John Zulaufbe8562b2020-12-15 14:21:01 -07001470 m_commandBuffer->reset();
1471 m_commandBuffer->begin();
1472
1473 vk::CmdCopyBuffer(m_commandBuffer->handle(), vbo2.handle(), vbo.handle(), 1, &buffer_region);
1474
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001475 auto vbo_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulaufbe8562b2020-12-15 14:21:01 -07001476 vbo_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
1477 vbo_barrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
1478 vbo_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1479 vbo_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
1480 vbo_barrier.buffer = vbo.handle();
1481 vbo_barrier.offset = buffer_region.dstOffset;
1482 vbo_barrier.size = buffer_region.size;
1483
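    // SetEvent after the copy, then WaitEvents with a transfer-write -> vertex-attribute-read barrier on vbo orders the
    // draw's vertex fetch after the copy, so no hazard is expected below.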
1484 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
1485
1486 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1487 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1488 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1489 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1490 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1491 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1492 &descriptor_set.set_, 0, nullptr);
1493
1494 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, 0, nullptr, 1,
1495 &vbo_barrier, 0, nullptr);
1496 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1497
1498 m_commandBuffer->EndRenderPass();
1499 m_commandBuffer->end();
John Zulaufbe8562b2020-12-15 14:21:01 -07001500
Jeremy Gebben170781d2020-11-19 16:21:21 -07001501 // DrawIndexed
Jeremy Gebben170781d2020-11-19 16:21:21 -07001502 const float ibo_data[3] = {0.f, 0.f, 0.f};
1503 VkBufferObj ibo, ibo2;
1504 buffer_usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1505 ibo.init(*m_device, ibo.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1506 ibo2.init(*m_device, ibo2.create_info(sizeof(ibo_data), buffer_usage, nullptr), mem_prop);
1507
1508 m_commandBuffer->reset();
1509 m_commandBuffer->begin();
1510 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1511 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1512 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1513 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1514 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1515
1516 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1517 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1518 &descriptor_set.set_, 0, nullptr);
1519 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1520 m_commandBuffer->EndRenderPass();
1521 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001522
1523 m_commandBuffer->reset();
1524 m_commandBuffer->begin();
1525
1526 buffer_region = {0, 0, sizeof(ibo_data)};
1527 vk::CmdCopyBuffer(m_commandBuffer->handle(), ibo2.handle(), ibo.handle(), 1, &buffer_region);
1528
1529 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1530 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1531 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1532 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1533 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1534 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1535 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1536 &descriptor_set.set_, 0, nullptr);
1537
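    // ibo was just rewritten by the copy; fetching indices for DrawIndexed is READ_AFTER_WRITE.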
1538 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1539 m_commandBuffer->DrawIndexed(3, 1, 0, 0, 0);
1540 m_errorMonitor->VerifyFound();
1541
1542 m_commandBuffer->EndRenderPass();
1543 m_commandBuffer->end();
1544
1545 // DrawIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001546 VkBufferObj buffer_drawIndirect, buffer_drawIndirect2;
1547 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1548 buffer_drawIndirect.init(*m_device, buffer_drawIndirect.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1549 mem_prop);
1550 buffer_drawIndirect2.init(*m_device, buffer_drawIndirect2.create_info(sizeof(VkDrawIndirectCommand), buffer_usage, nullptr),
1551 mem_prop);
1552
1553 m_commandBuffer->reset();
1554 m_commandBuffer->begin();
1555 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1556 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1557 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1558 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1559
1560 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1561 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1562 &descriptor_set.set_, 0, nullptr);
1563 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1564 m_commandBuffer->EndRenderPass();
1565 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001566
1567 m_commandBuffer->reset();
1568 m_commandBuffer->begin();
1569
1570 buffer_region = {0, 0, sizeof(VkDrawIndirectCommand)};
1571 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndirect2.handle(), buffer_drawIndirect.handle(), 1, &buffer_region);
1572
1573 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1574 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1575 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1576 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1577 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1578 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1579 &descriptor_set.set_, 0, nullptr);
1580
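    // buffer_drawIndirect was just rewritten; reading the draw parameters in CmdDrawIndirect is READ_AFTER_WRITE.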
1581 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1582 vk::CmdDrawIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndirectCommand));
1583 m_errorMonitor->VerifyFound();
1584
1585 m_commandBuffer->EndRenderPass();
1586 m_commandBuffer->end();
1587
1588 // DrawIndexedIndirect
Jeremy Gebben170781d2020-11-19 16:21:21 -07001589 VkBufferObj buffer_drawIndexedIndirect, buffer_drawIndexedIndirect2;
1590 buffer_usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1591 buffer_drawIndexedIndirect.init(
1592 *m_device, buffer_drawIndexedIndirect.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1593 buffer_drawIndexedIndirect2.init(
1594 *m_device, buffer_drawIndexedIndirect2.create_info(sizeof(VkDrawIndexedIndirectCommand), buffer_usage, nullptr), mem_prop);
1595
1596 m_commandBuffer->reset();
1597 m_commandBuffer->begin();
1598 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1599 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1600 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1601 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1602 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1603
1604 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1605 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1606 &descriptor_set.set_, 0, nullptr);
1607 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1608 m_commandBuffer->EndRenderPass();
1609 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001610
1611 m_commandBuffer->reset();
1612 m_commandBuffer->begin();
1613
1614 buffer_region = {0, 0, sizeof(VkDrawIndexedIndirectCommand)};
1615 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_drawIndexedIndirect2.handle(), buffer_drawIndexedIndirect.handle(), 1,
1616 &buffer_region);
1617
1618 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1619 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1620 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1621 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1622 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1623 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1624 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
1625 &descriptor_set.set_, 0, nullptr);
1626
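    // Same pattern for the indexed-indirect parameter buffer.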
1627 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1628 vk::CmdDrawIndexedIndirect(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, 1,
1629 sizeof(VkDrawIndexedIndirectCommand));
1630 m_errorMonitor->VerifyFound();
1631
1632 m_commandBuffer->EndRenderPass();
1633 m_commandBuffer->end();
1634
1635 if (has_khr_indirect) {
1636 // DrawIndirectCount
1637 auto fpCmdDrawIndirectCountKHR =
1638 (PFN_vkCmdDrawIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndirectCountKHR");
1639 if (!fpCmdDrawIndirectCountKHR) {
1640 printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1641 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001642 VkBufferObj buffer_count, buffer_count2;
1643 buffer_usage =
1644 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1645 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1646 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1647
1648 m_commandBuffer->reset();
1649 m_commandBuffer->begin();
1650 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1651 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1652 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1653 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1654
1655 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1656 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1657 0, 1, &descriptor_set.set_, 0, nullptr);
1658 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1659 sizeof(VkDrawIndirectCommand));
1660 m_commandBuffer->EndRenderPass();
1661 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001662
1663 m_commandBuffer->reset();
1664 m_commandBuffer->begin();
1665
1666 buffer_region = {0, 0, sizeof(uint32_t)};
1667 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1668
1669 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1670 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1671 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1672 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1673 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1674 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1675 0, 1, &descriptor_set.set_, 0, nullptr);
1676
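            // The count buffer was just rewritten by the copy; reading it in vkCmdDrawIndirectCountKHR is READ_AFTER_WRITE.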
1677 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1678 fpCmdDrawIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndirect.handle(), 0, buffer_count.handle(), 0, 1,
1679 sizeof(VkDrawIndirectCommand));
1680 m_errorMonitor->VerifyFound();
1681
1682 m_commandBuffer->EndRenderPass();
1683 m_commandBuffer->end();
1684 }
1685
1686 // DrawIndexedIndirectCount
1687 auto fpCmdDrawIndexIndirectCountKHR =
1688 (PFN_vkCmdDrawIndirectCount)vk::GetDeviceProcAddr(m_device->device(), "vkCmdDrawIndexedIndirectCountKHR");
1689 if (!fpCmdDrawIndexIndirectCountKHR) {
1690 printf("%s Test requires unsupported vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n", kSkipPrefix);
1691 } else {
Jeremy Gebben170781d2020-11-19 16:21:21 -07001692 VkBufferObj buffer_count, buffer_count2;
1693 buffer_usage =
1694 VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
1695 buffer_count.init(*m_device, buffer_count.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1696 buffer_count2.init(*m_device, buffer_count2.create_info(sizeof(uint32_t), buffer_usage, nullptr), mem_prop);
1697
1698 m_commandBuffer->reset();
1699 m_commandBuffer->begin();
1700 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1701 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1702 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1703 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1704 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1705
1706 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1707 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1708 0, 1, &descriptor_set.set_, 0, nullptr);
1709 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1710 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1711 m_commandBuffer->EndRenderPass();
1712 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001713
1714 m_commandBuffer->reset();
1715 m_commandBuffer->begin();
1716
1717 buffer_region = {0, 0, sizeof(uint32_t)};
1718 vk::CmdCopyBuffer(m_commandBuffer->handle(), buffer_count2.handle(), buffer_count.handle(), 1, &buffer_region);
1719
1720 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1721 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
1722 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), ibo.handle(), 0, VK_INDEX_TYPE_UINT16);
1723 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
1724 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
1725 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
1726 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(),
1727 0, 1, &descriptor_set.set_, 0, nullptr);
1728
1729 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
1730 fpCmdDrawIndexIndirectCountKHR(m_commandBuffer->handle(), buffer_drawIndexedIndirect.handle(), 0, buffer_count.handle(),
1731 0, 1, sizeof(VkDrawIndexedIndirectCommand));
1732 m_errorMonitor->VerifyFound();
1733
1734 m_commandBuffer->EndRenderPass();
1735 m_commandBuffer->end();
1736 }
1737 } else {
1738         printf("%s Test requires unsupported vkCmdDrawIndirectCountKHR & vkCmdDrawIndexedIndirectCountKHR feature. Skipped.\n",
1739 kSkipPrefix);
1740 }
1741}
1742
1743TEST_F(VkSyncValTest, SyncCmdClear) {
1744 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1745 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1746 // CmdClearColorImage
Jeremy Gebben170781d2020-11-19 16:21:21 -07001747 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1748 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
1749 VkImageObj image_a(m_device), image_b(m_device);
1750 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1751 image_a.Init(image_ci);
1752 image_b.Init(image_ci);
1753
1754 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
1755 VkOffset3D zero_offset{0, 0, 0};
1756 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
1757 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
1758
1759 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
1760
1761 m_commandBuffer->begin();
1762
1763 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1764 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
1765
1766 auto cb = m_commandBuffer->handle();
1767 VkClearColorValue ccv = {};
1768 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1769 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001770
1771 m_commandBuffer->reset();
1772 m_commandBuffer->begin();
1773 vk::CmdCopyImage(cb, image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &full_region);
1774
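    // The copy read image_a and wrote image_b, so clearing image_a is WRITE_AFTER_READ and clearing image_b is WRITE_AFTER_WRITE.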
1775 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1776 vk::CmdClearColorImage(m_commandBuffer->handle(), image_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1777 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1778 vk::CmdClearColorImage(m_commandBuffer->handle(), image_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
1779 m_errorMonitor->VerifyFound();
1780
1781 m_commandBuffer->end();
1782
1783 // CmdClearDepthStencilImage
1784 format = FindSupportedDepthStencilFormat(gpu());
1785 if (!format) {
1786 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
1787 return;
1788 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07001789 VkImageObj image_ds_a(m_device), image_ds_b(m_device);
1790 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
1791 image_ds_a.Init(image_ci);
1792 image_ds_b.Init(image_ci);
1793
1794 const VkImageAspectFlags ds_aspect = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
1795 image_ds_a.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1796 image_ds_b.SetLayout(ds_aspect, VK_IMAGE_LAYOUT_GENERAL);
1797
1798 m_commandBuffer->begin();
1799 const VkClearDepthStencilValue clear_value = {};
1800 VkImageSubresourceRange ds_range = {ds_aspect, 0, 1, 0, 1};
1801
1802 vk::CmdClearDepthStencilImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1, &ds_range);
1803 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001804
1805 VkImageSubresourceLayers ds_layers_all{ds_aspect, 0, 0, 1};
1806 VkImageCopy ds_full_region = {ds_layers_all, zero_offset, ds_layers_all, zero_offset, full_extent};
1807
1808 m_commandBuffer->reset();
1809 m_commandBuffer->begin();
1810 vk::CmdCopyImage(cb, image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
1811 &ds_full_region);
1812
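    // Same pattern with the depth/stencil images: clear-after-read on image_ds_a, clear-after-write on image_ds_b.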
1813 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
1814 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_a.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1815 &ds_range);
1816 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
1817 vk::CmdClearDepthStencilImage(m_commandBuffer->handle(), image_ds_b.handle(), VK_IMAGE_LAYOUT_GENERAL, &clear_value, 1,
1818 &ds_range);
1819 m_errorMonitor->VerifyFound();
1820
1821 m_commandBuffer->end();
1822}
1823
1824TEST_F(VkSyncValTest, SyncCmdQuery) {
1825 // CmdCopyQueryPoolResults
Jeremy Gebben170781d2020-11-19 16:21:21 -07001826 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1827 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
1828 if (IsPlatform(kNexusPlayer)) {
1829 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
1830 return;
1831 }
1832 if ((m_device->queue_props.empty()) || (m_device->queue_props[0].queueCount < 2)) {
1833 printf("%s Queue family needs to have multiple queues to run this test.\n", kSkipPrefix);
1834 return;
1835 }
1836 uint32_t queue_count;
1837 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
Jeremy Gebbend2573fc2021-05-12 17:17:38 -06001838 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
1839 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
Jeremy Gebben170781d2020-11-19 16:21:21 -07001840 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits == 0) {
1841 printf("%s Device graphic queue has timestampValidBits of 0, skipping.\n", kSkipPrefix);
1842 return;
1843 }
1844
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001845 vk_testing::QueryPool query_pool;
sfricke-samsung6fc3e322022-02-15 22:41:29 -08001846 VkQueryPoolCreateInfo query_pool_create_info = LvlInitStruct<VkQueryPoolCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001847 query_pool_create_info.queryType = VK_QUERY_TYPE_TIMESTAMP;
1848 query_pool_create_info.queryCount = 1;
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001849 query_pool.init(*m_device, query_pool_create_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001850
1851 VkBufferObj buffer_a, buffer_b;
1852 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
1853 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
1854 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
1855
1856 VkBufferCopy region = {0, 0, 256};
1857
1858 auto cb = m_commandBuffer->handle();
1859 m_commandBuffer->begin();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001860 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1861 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
1862 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 0, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001863 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001864
1865 m_commandBuffer->reset();
1866 m_commandBuffer->begin();
1867 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001868 vk::CmdResetQueryPool(cb, query_pool.handle(), 0, 1);
1869 vk::CmdWriteTimestamp(cb, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, query_pool.handle(), 0);
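    // buffer_a was read and buffer_b written by the copy above; copying query results into them is WRITE_AFTER_READ and
    // WRITE_AFTER_WRITE respectively.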
Jeremy Gebben170781d2020-11-19 16:21:21 -07001870 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001871 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_a.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001872 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001873 vk::CmdCopyQueryPoolResults(cb, query_pool.handle(), 0, 1, buffer_b.handle(), 0, 256, VK_QUERY_RESULT_WAIT_BIT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001874 m_commandBuffer->end();
1875 m_errorMonitor->VerifyFound();
1876
1877     // TODO: Track VkQueryPool
1878     // TODO: CmdWriteTimestamp
Jeremy Gebben170781d2020-11-19 16:21:21 -07001879}
1880
1881TEST_F(VkSyncValTest, SyncCmdDrawDepthStencil) {
1882 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
1883 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
Jeremy Gebben170781d2020-11-19 16:21:21 -07001884
1885 const auto format_ds = FindSupportedDepthStencilFormat(gpu());
1886 if (!format_ds) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001887 GTEST_SKIP() << "No Depth + Stencil format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001888 }
1889 const auto format_dp = FindSupportedDepthOnlyFormat(gpu());
1890 if (!format_dp) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001891        GTEST_SKIP() << "No depth-only format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001892 }
1893 const auto format_st = FindSupportedStencilOnlyFormat(gpu());
1894 if (!format_st) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06001895        GTEST_SKIP() << "No stencil-only format found. Skipped.";
Jeremy Gebben170781d2020-11-19 16:21:21 -07001896 }
1897
1898 VkDepthStencilObj image_ds(m_device), image_dp(m_device), image_st(m_device);
1899 image_ds.Init(m_device, 16, 16, format_ds);
1900 image_dp.Init(m_device, 16, 16, format_dp);
1901 image_st.Init(m_device, 16, 16, format_st);
1902
1903 VkRenderpassObj rp_ds(m_device, format_ds, true), rp_dp(m_device, format_dp, true), rp_st(m_device, format_st, true);
1904
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001905 vk_testing::Framebuffer fb_ds, fb_dp, fb_st;
Jeremy Gebben170781d2020-11-19 16:21:21 -07001906 VkFramebufferCreateInfo fbci = {
1907 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_ds.handle(), 1, image_ds.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001908 fb_ds.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001909 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_dp.handle(), 1, image_dp.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001910 fb_dp.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001911 fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp_st.handle(), 1, image_st.BindInfo(), 16, 16, 1};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001912 fb_st.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07001913
1914 VkStencilOpState stencil = {};
1915 stencil.failOp = VK_STENCIL_OP_KEEP;
1916 stencil.passOp = VK_STENCIL_OP_KEEP;
1917 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
1918 stencil.compareOp = VK_COMPARE_OP_NEVER;
1919
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07001920 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001921 ds_ci.depthTestEnable = VK_TRUE;
1922 ds_ci.depthWriteEnable = VK_TRUE;
1923 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
1924 ds_ci.stencilTestEnable = VK_TRUE;
1925 ds_ci.front = stencil;
1926 ds_ci.back = stencil;
1927
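    // Create three pipelines, one per render pass: g_pipe_ds uses depth + stencil, g_pipe_dp depth only
    // (stencil test disabled), and g_pipe_st stencil only (depth test disabled).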
1928 CreatePipelineHelper g_pipe_ds(*this), g_pipe_dp(*this), g_pipe_st(*this);
1929 g_pipe_ds.InitInfo();
1930 g_pipe_ds.gp_ci_.renderPass = rp_ds.handle();
1931 g_pipe_ds.gp_ci_.pDepthStencilState = &ds_ci;
1932 g_pipe_ds.InitState();
1933 ASSERT_VK_SUCCESS(g_pipe_ds.CreateGraphicsPipeline());
1934 g_pipe_dp.InitInfo();
1935 g_pipe_dp.gp_ci_.renderPass = rp_dp.handle();
1936 ds_ci.stencilTestEnable = VK_FALSE;
1937 g_pipe_dp.gp_ci_.pDepthStencilState = &ds_ci;
1938 g_pipe_dp.InitState();
1939 ASSERT_VK_SUCCESS(g_pipe_dp.CreateGraphicsPipeline());
1940 g_pipe_st.InitInfo();
1941 g_pipe_st.gp_ci_.renderPass = rp_st.handle();
1942 ds_ci.depthTestEnable = VK_FALSE;
1943 ds_ci.stencilTestEnable = VK_TRUE;
1944 g_pipe_st.gp_ci_.pDepthStencilState = &ds_ci;
1945 g_pipe_st.InitState();
1946 ASSERT_VK_SUCCESS(g_pipe_st.CreateGraphicsPipeline());
1947
1948 m_commandBuffer->begin();
1949 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
1950 m_renderPassBeginInfo.pClearValues = nullptr;
1951 m_renderPassBeginInfo.clearValueCount = 0;
1952
1953 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001954 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001955 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1956 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_ds.pipeline_);
1957 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1958 m_commandBuffer->EndRenderPass();
1959
1960 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001961 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001962 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1963 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_dp.pipeline_);
1964 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1965 m_commandBuffer->EndRenderPass();
1966
1967 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06001968 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001969 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
1970 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_st.pipeline_);
1971 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
1972 m_commandBuffer->EndRenderPass();
1973
1974 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07001975
1976 m_commandBuffer->reset();
1977 m_commandBuffer->begin();
1978
1979 VkImageCopy copyRegion;
1980 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1981 copyRegion.srcSubresource.mipLevel = 0;
1982 copyRegion.srcSubresource.baseArrayLayer = 0;
1983 copyRegion.srcSubresource.layerCount = 1;
1984 copyRegion.srcOffset = {0, 0, 0};
1985 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
1986 copyRegion.dstSubresource.mipLevel = 0;
1987 copyRegion.dstSubresource.baseArrayLayer = 0;
1988 copyRegion.dstSubresource.layerCount = 1;
1989 copyRegion.dstOffset = {0, 0, 0};
1990 copyRegion.extent = {16, 16, 1};
1991
1992 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, image_dp.handle(),
1993 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, &copyRegion);
1994
1995 copyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1996 copyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
1997 m_commandBuffer->CopyImage(image_ds.handle(), VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, image_st.handle(),
1998 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, &copyRegion);
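    // The copies above read image_ds and write image_dp / image_st, so beginning each render pass below (whose
    // attachment load / layout-transition accesses touch those images) is expected to hazard: WRITE_AFTER_READ for
    // the depth/stencil pass and WRITE_AFTER_WRITE for the depth-only and stencil-only passes.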
1999 m_renderPassBeginInfo.renderPass = rp_ds.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002000 m_renderPassBeginInfo.framebuffer = fb_ds.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002001 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
2002 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2003 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002004
2005 m_renderPassBeginInfo.renderPass = rp_dp.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002006 m_renderPassBeginInfo.framebuffer = fb_dp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002007 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2008 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2009 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002010
2011 m_renderPassBeginInfo.renderPass = rp_st.handle();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002012 m_renderPassBeginInfo.framebuffer = fb_st.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002013 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2014 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2015 m_errorMonitor->VerifyFound();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002016}
2017
John Zulaufd57a36b2021-08-16 10:34:44 -06002018
Jeremy Gebben170781d2020-11-19 16:21:21 -07002019TEST_F(VkSyncValTest, RenderPassLoadHazardVsInitialLayout) {
2020 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
John Zulaufd57a36b2021-08-16 10:34:44 -06002021 bool do_none_load_op_test = false;
2022 if (DeviceExtensionSupported(gpu(), nullptr, VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME)) {
2023 m_device_extension_names.push_back(VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME);
2024 do_none_load_op_test = true;
2025 }
2026
Jeremy Gebben170781d2020-11-19 16:21:21 -07002027 ASSERT_NO_FATAL_FAILURE(InitState());
2028 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2029
2030 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2031 VkImageUsageFlags usage_input = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2032 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2033 VkImageObj image_color(m_device), image_input(m_device);
2034 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2035 image_color.Init(image_ci);
2036 image_ci.usage = usage_input;
2037 image_input.Init(image_ci);
2038 VkImageView attachments[] = {image_color.targetView(format), image_input.targetView(format)};
2039
John Zulaufd57a36b2021-08-16 10:34:44 -06002040 VkAttachmentDescription attachmentDescriptions[] = {
Jeremy Gebben170781d2020-11-19 16:21:21 -07002041 // Result attachment
2042 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2043 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2044         VK_IMAGE_LAYOUT_UNDEFINED,  // This UNDEFINED initial layout is what causes the desired SYNC-HAZARD errors in BeginRenderPass.
2045                                     // It should be VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL.
2046 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL},
2047 // Input attachment
2048 {(VkAttachmentDescriptionFlags)0, VK_FORMAT_R8G8B8A8_UNORM, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_LOAD,
2049 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_ATTACHMENT_STORE_OP_DONT_CARE,
2050 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL}};
2051
2052 const VkAttachmentReference resultAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2053 const VkAttachmentReference inputAttachmentRef = {1u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2054
2055 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2056 VK_PIPELINE_BIND_POINT_GRAPHICS,
2057 1u,
2058 &inputAttachmentRef,
2059 1u,
2060 &resultAttachmentRef,
2061 0,
2062 0,
2063 0u,
2064 0};
2065
2066 const VkSubpassDependency subpassDependency = {VK_SUBPASS_EXTERNAL,
2067 0,
2068 VK_PIPELINE_STAGE_TRANSFER_BIT,
2069 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
2070 VK_ACCESS_TRANSFER_WRITE_BIT,
2071 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT | VK_ACCESS_SHADER_READ_BIT,
2072 VK_DEPENDENCY_BY_REGION_BIT};
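    // External dependency: make the transfer write that filled the input attachment visible to the
    // input-attachment / shader reads in subpass 0.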
2073
2074 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2075 0,
2076 (VkRenderPassCreateFlags)0,
2077 2u,
2078 attachmentDescriptions,
2079 1u,
2080 &subpassDescription,
2081 1u,
2082 &subpassDependency};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002083 vk_testing::RenderPass rp;
2084 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002085
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002086 vk_testing::Framebuffer fb;
2087 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0, rp.handle(), 2, attachments, 32, 32, 1};
2088 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002089
2090 image_input.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
2091
2092 m_commandBuffer->begin();
2093
2094 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002095 m_renderPassBeginInfo.renderPass = rp.handle();
2096 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002097
2098 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
2099 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2100 // Even though we have no accesses prior, the layout transition *is* an access, so load can be validated vs. layout transition
2101 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2102 m_errorMonitor->VerifyFound();
John Zulaufd57a36b2021-08-16 10:34:44 -06002103
2104 vk_testing::RenderPass rp_no_load_store;
2105 if (do_none_load_op_test) {
John Zulaufd57a36b2021-08-16 10:34:44 -06002106 attachmentDescriptions[0].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2107 attachmentDescriptions[0].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
2108 attachmentDescriptions[1].loadOp = VK_ATTACHMENT_LOAD_OP_NONE_EXT;
2109 attachmentDescriptions[1].storeOp = VK_ATTACHMENT_STORE_OP_NONE_EXT;
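        // With loadOp/storeOp NONE the attachments are not accessed by the load/store operations, so this
        // begin/end pair is the positive (no hazard expected) half of the sub-test.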
2110 rp_no_load_store.init(*m_device, renderPassInfo);
2111 m_renderPassBeginInfo.renderPass = rp_no_load_store.handle();
2112 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2113 m_commandBuffer->EndRenderPass();
John Zulaufd57a36b2021-08-16 10:34:44 -06002114 } else {
2115 printf("%s VK_EXT_load_store_op_none not supported, skipping sub-test\n", kSkipPrefix);
2116 }
Jeremy Gebben170781d2020-11-19 16:21:21 -07002117}
2118
2119TEST_F(VkSyncValTest, SyncRenderPassWithWrongDepthStencilInitialLayout) {
2120 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2121 ASSERT_NO_FATAL_FAILURE(InitState());
2122 if (IsPlatform(kNexusPlayer)) {
2123 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2124 return;
2125 }
2126
2127 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
2128
2129 VkFormat color_format = VK_FORMAT_R8G8B8A8_UNORM;
2130 VkFormat ds_format = FindSupportedDepthStencilFormat(gpu());
2131 if (!ds_format) {
2132 printf("%s No Depth + Stencil format found. Skipped.\n", kSkipPrefix);
2133 return;
2134 }
2135 VkImageUsageFlags usage_color = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2136 VkImageUsageFlags usage_ds = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
2137 VkImageObj image_color(m_device), image_color2(m_device);
2138 auto image_ci = VkImageObj::ImageCreateInfo2D(32, 32, 1, 1, color_format, usage_color, VK_IMAGE_TILING_OPTIMAL);
2139 image_color.Init(image_ci);
2140 image_color2.Init(image_ci);
2141 VkDepthStencilObj image_ds(m_device);
2142 image_ds.Init(m_device, 32, 32, ds_format, usage_ds);
2143
2144 const VkAttachmentDescription colorAttachmentDescription = {(VkAttachmentDescriptionFlags)0,
2145 color_format,
2146 VK_SAMPLE_COUNT_1_BIT,
2147 VK_ATTACHMENT_LOAD_OP_CLEAR,
2148 VK_ATTACHMENT_STORE_OP_STORE,
2149 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2150 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2151 VK_IMAGE_LAYOUT_UNDEFINED,
2152 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2153
2154 const VkAttachmentDescription depthStencilAttachmentDescription = {
2155 (VkAttachmentDescriptionFlags)0, ds_format, VK_SAMPLE_COUNT_1_BIT, VK_ATTACHMENT_LOAD_OP_CLEAR,
2156 VK_ATTACHMENT_STORE_OP_STORE, VK_ATTACHMENT_LOAD_OP_CLEAR, VK_ATTACHMENT_STORE_OP_STORE,
2157        VK_IMAGE_LAYOUT_UNDEFINED,  // This UNDEFINED initial layout is what causes the desired SYNC-HAZARD-WRITE_AFTER_WRITE in BeginRenderPass.
2158                                    // It should be VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL.
2159 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2160
2161 std::vector<VkAttachmentDescription> attachmentDescriptions;
2162 attachmentDescriptions.push_back(colorAttachmentDescription);
2163 attachmentDescriptions.push_back(depthStencilAttachmentDescription);
2164
2165 const VkAttachmentReference colorAttachmentRef = {0u, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2166
2167 const VkAttachmentReference depthStencilAttachmentRef = {1u, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
2168
2169 const VkSubpassDescription subpassDescription = {(VkSubpassDescriptionFlags)0,
2170 VK_PIPELINE_BIND_POINT_GRAPHICS,
2171 0u,
2172 0,
2173 1u,
2174 &colorAttachmentRef,
2175 0,
2176 &depthStencilAttachmentRef,
2177 0u,
2178 0};
2179
2180 const VkRenderPassCreateInfo renderPassInfo = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2181 0,
2182 (VkRenderPassCreateFlags)0,
2183 (uint32_t)attachmentDescriptions.size(),
2184 &attachmentDescriptions[0],
2185 1u,
2186 &subpassDescription,
2187 0u,
2188 0};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002189 vk_testing::RenderPass rp;
2190 rp.init(*m_device, renderPassInfo);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002191
2192 VkImageView fb_attachments[] = {image_color.targetView(color_format),
2193 image_ds.targetView(ds_format, VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)};
2194 const VkFramebufferCreateInfo fbci = {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002195 VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, 0, 0u, rp.handle(), 2u, fb_attachments, 32, 32, 1u,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002196 };
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002197 vk_testing::Framebuffer fb;
2198 fb.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002199 fb_attachments[0] = image_color2.targetView(color_format);
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002200 vk_testing::Framebuffer fb1;
2201 fb1.init(*m_device, fbci);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002202
2203 CreatePipelineHelper g_pipe(*this);
2204 g_pipe.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002205 g_pipe.gp_ci_.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002206
2207 VkStencilOpState stencil = {};
2208 stencil.failOp = VK_STENCIL_OP_KEEP;
2209 stencil.passOp = VK_STENCIL_OP_KEEP;
2210 stencil.depthFailOp = VK_STENCIL_OP_KEEP;
2211 stencil.compareOp = VK_COMPARE_OP_NEVER;
2212
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07002213 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002214 ds_ci.depthTestEnable = VK_TRUE;
2215 ds_ci.depthWriteEnable = VK_TRUE;
2216 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
2217 ds_ci.stencilTestEnable = VK_TRUE;
2218 ds_ci.front = stencil;
2219 ds_ci.back = stencil;
2220
2221 g_pipe.gp_ci_.pDepthStencilState = &ds_ci;
2222 g_pipe.InitState();
2223 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2224
2225 m_commandBuffer->begin();
Tony-LunarG73f37032021-06-07 11:47:03 -06002226 VkClearValue clear = {};
2227 std::array<VkClearValue, 2> clear_values = { {clear, clear} };
2228 m_renderPassBeginInfo.pClearValues = clear_values.data();
2229 m_renderPassBeginInfo.clearValueCount = clear_values.size();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002230 m_renderPassBeginInfo.renderArea = {{0, 0}, {32, 32}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002231 m_renderPassBeginInfo.renderPass = rp.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002232
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002233 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002234 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2235 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2236 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2237 m_commandBuffer->EndRenderPass();
2238
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002239 m_renderPassBeginInfo.framebuffer = fb1.handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002240
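    // fb1 swaps in the never-written image_color2 but reuses the same depth/stencil attachment; its UNDEFINED
    // initialLayout forces another layout-transition write that hazards (WRITE_AFTER_WRITE) with the previous pass.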
2241 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
2242 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
2243 m_errorMonitor->VerifyFound();
2244}
2245
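// Helper that bundles the color/input attachment images, attachment descriptions, subpass and dependency arrays,
// render pass, framebuffer, and begin info shared by the sync validation render pass tests below.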
John Zulauf01a49ee2022-07-13 11:37:08 -06002246struct CreateRenderPassHelper {
2247 struct SubpassDescriptionStore {
2248 const std::vector<VkAttachmentReference>& input_store;
2249 const std::vector<VkAttachmentReference>& color_store;
2250 VkSubpassDescription desc;
2251 SubpassDescriptionStore(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color)
2252 : input_store(input), color_store(color) {
2253 desc = {
2254 0u,
2255 VK_PIPELINE_BIND_POINT_GRAPHICS,
2256 static_cast<uint32_t>(input_store.size()),
2257 input_store.data(),
2258 static_cast<uint32_t>(color_store.size()),
2259 color_store.data(),
2260 nullptr,
2261 nullptr,
2262 0u,
2263 nullptr,
2264 };
2265 if (desc.inputAttachmentCount == 0) {
2266 desc.pInputAttachments = nullptr;
2267 }
2268 if (desc.colorAttachmentCount == 0) {
2269 desc.pColorAttachments = nullptr;
2270 }
2271 }
2272 };
2273
John Zulauf2f5947d2022-07-27 15:36:31 -06002274 VkImageUsageFlags usage_color =
2275 VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
John Zulauf01a49ee2022-07-13 11:37:08 -06002276 VkImageUsageFlags usage_input =
2277 VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
2278 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
2279 VkClearColorValue ccv = {};
2280
2281 VkDeviceObj* dev;
2282 const static uint32_t kDefaultImageSize = 64;
2283 uint32_t width = kDefaultImageSize;
2284 uint32_t height = kDefaultImageSize;
2285 std::shared_ptr<VkImageObj> image_color;
2286 std::shared_ptr<VkImageObj> image_input;
2287 VkImageView view_input = VK_NULL_HANDLE;
2288 VkImageView view_color = VK_NULL_HANDLE;
2289
2290 VkAttachmentReference color_ref;
2291 VkAttachmentReference input_ref;
2292 std::vector<VkImageView> attachments;
2293 VkAttachmentDescription fb_attach_desc;
2294 VkAttachmentDescription input_attach_desc;
2295 std::vector<VkAttachmentDescription> attachment_descs;
2296 std::vector<VkAttachmentReference> input_attachments;
2297 std::vector<VkAttachmentReference> color_attachments;
2298 std::vector<VkSubpassDependency> subpass_dep;
2299 std::vector<VkSubpassDescription> subpasses;
2300 std::vector<SubpassDescriptionStore> subpass_description_store;
2301 VkRenderPassCreateInfo render_pass_create_info;
John Zulauf2f5947d2022-07-27 15:36:31 -06002302 std::shared_ptr<vk_testing::RenderPass> render_pass;
John Zulauf01a49ee2022-07-13 11:37:08 -06002303 std::shared_ptr<vk_testing::Framebuffer> framebuffer;
2304 VkRenderPassBeginInfo render_pass_begin;
2305 std::vector<VkClearValue> clear_colors;
2306
2307 CreateRenderPassHelper(VkDeviceObj* dev_)
2308 : dev(dev_),
2309 image_color(std::make_shared<VkImageObj>(dev)),
2310 image_input(std::make_shared<VkImageObj>(dev)),
2311 color_ref(DefaultColorRef()),
2312 input_ref(DefaultInputRef()),
2313 fb_attach_desc(DefaultFbAttachDesc()),
2314 input_attach_desc(DefaultInputAttachDesc()) {}
2315
2316 CreateRenderPassHelper(const CreateRenderPassHelper& other) = default;
2317
2318 void InitImageAndView() {
2319 auto image_ci = VkImageObj::ImageCreateInfo2D(width, height, 1, 1, format, usage_input, VK_IMAGE_TILING_OPTIMAL);
2320 image_input->InitNoLayout(image_ci);
2321 image_ci.usage = usage_color;
2322 image_color->InitNoLayout(image_ci);
2323
2324 view_input = image_input->targetView(format);
2325 view_color = image_color->targetView(format);
2326 attachments = {view_color, view_input};
2327 }
2328
John Zulauf2f5947d2022-07-27 15:36:31 -06002329 static VkAttachmentReference DefaultColorRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002330 return {
2331 0u,
2332 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2333 };
2334 }
2335
John Zulauf2f5947d2022-07-27 15:36:31 -06002336 static VkAttachmentReference DefaultInputRef() {
John Zulauf01a49ee2022-07-13 11:37:08 -06002337 return {
2338 1u,
2339 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2340 };
2341 };
2342
John Zulauf2f5947d2022-07-27 15:36:31 -06002343 static VkAttachmentReference UnusedColorAttachmentRef() {
2344 return {
2345 VK_ATTACHMENT_UNUSED,
2346 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2347 };
2348 };
2349
John Zulauf01a49ee2022-07-13 11:37:08 -06002350 VkAttachmentDescription DefaultFbAttachDesc() {
2351 return VkAttachmentDescription{
2352 0u,
2353 format,
2354 VK_SAMPLE_COUNT_1_BIT,
2355 VK_ATTACHMENT_LOAD_OP_CLEAR,
2356 VK_ATTACHMENT_STORE_OP_STORE,
2357 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2358 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2359 VK_IMAGE_LAYOUT_UNDEFINED,
2360 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
2361 };
2362 }
2363 VkAttachmentDescription DefaultInputAttachDesc() const {
2364 return VkAttachmentDescription{
2365 0u,
2366 format,
2367 VK_SAMPLE_COUNT_1_BIT,
2368 VK_ATTACHMENT_LOAD_OP_LOAD,
2369 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2370 VK_ATTACHMENT_LOAD_OP_DONT_CARE,
2371 VK_ATTACHMENT_STORE_OP_DONT_CARE,
2372 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2373 VK_IMAGE_LAYOUT_GENERAL,
2374 };
2375 }
2376
John Zulauf2f5947d2022-07-27 15:36:31 -06002377 void InitAllAttachmentsToLayoutGeneral() {
2378 fb_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2379 fb_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2380 color_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2381 input_attach_desc.initialLayout = VK_IMAGE_LAYOUT_GENERAL;
2382 input_attach_desc.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
2383 input_ref.layout = VK_IMAGE_LAYOUT_GENERAL;
2384 }
2385
2386 void SetAttachmentLayout(VkImageObj* image, const VkAttachmentDescription& attach_desc) {
2387 if (image && image->initialized() && (attach_desc.initialLayout != VK_IMAGE_LAYOUT_UNDEFINED)) {
2388 image->SetLayout(attach_desc.initialLayout);
2389 }
2390 }
2391
2392 void SetColorLayout() { SetAttachmentLayout(image_color.get(), fb_attach_desc); }
2393 void SetInputLayout() { SetAttachmentLayout(image_input.get(), input_attach_desc); }
2394
2395 void InitAttachmentLayouts() {
2396 SetColorLayout();
2397 SetInputLayout();
2398 }
2399
John Zulauf01a49ee2022-07-13 11:37:08 -06002400 void InitAttachmentArrays() {
2401 // Add attachments
2402 if (attachment_descs.empty()) {
2403 attachment_descs = {fb_attach_desc, input_attach_desc};
2404 }
2405 if (color_attachments.empty()) {
2406 color_attachments = {color_ref};
2407 }
2408 if (input_attachments.empty()) {
2409 input_attachments = {input_ref};
2410 }
2411 }
2412
John Zulauf2f5947d2022-07-27 15:36:31 -06002413    void AddSubpassDescription(const std::vector<VkAttachmentReference>& input, const std::vector<VkAttachmentReference>& color) {
2414 subpass_description_store.emplace_back(input, color);
2415 }
2416
2417 // Capture the current input and color attachements, which can then be modified
2418 void AddSubpassDescription() { subpass_description_store.emplace_back(input_attachments, color_attachments); }
2419
John Zulauf01a49ee2022-07-13 11:37:08 -06002420 // This is the default for a single subpass renderpass, don't call if you want to change that
2421 void InitSubpassDescription() {
2422 if (subpass_description_store.empty()) {
John Zulauf2f5947d2022-07-27 15:36:31 -06002423 AddSubpassDescription();
John Zulauf01a49ee2022-07-13 11:37:08 -06002424 }
2425 }
2426
2427 void InitSubpasses() {
2428 if (subpasses.empty()) {
2429 subpasses.reserve(subpass_description_store.size());
2430 for (const auto& desc_store : subpass_description_store) {
2431 subpasses.emplace_back(desc_store.desc);
2432 }
2433 }
2434 }
2435
2436 void InitRenderPassInfo() {
2437 render_pass_create_info = {VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
2438 nullptr,
2439 0u,
2440 static_cast<uint32_t>(attachment_descs.size()),
2441 attachment_descs.data(),
2442 static_cast<uint32_t>(subpasses.size()),
2443 subpasses.data(),
2444 static_cast<uint32_t>(subpass_dep.size()),
2445 subpass_dep.data()};
2446 }
2447
2448 void InitRenderPass() {
2449 InitAttachmentArrays();
2450 InitSubpassDescription();
2451 InitSubpasses();
2452 InitRenderPassInfo();
John Zulauf2f5947d2022-07-27 15:36:31 -06002453 render_pass = std::make_shared<vk_testing::RenderPass>();
2454 render_pass->init(*dev, render_pass_create_info);
John Zulauf01a49ee2022-07-13 11:37:08 -06002455 }
2456
2457 void InitFramebuffer() {
2458 framebuffer = std::make_shared<vk_testing::Framebuffer>();
2459 VkFramebufferCreateInfo fbci = {VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
2460 0,
2461 0u,
John Zulauf2f5947d2022-07-27 15:36:31 -06002462 render_pass->handle(),
John Zulauf01a49ee2022-07-13 11:37:08 -06002463 static_cast<uint32_t>(attachments.size()),
2464 attachments.data(),
2465 width,
2466 height,
2467 1u};
2468 framebuffer->init(*dev, fbci);
2469 }
2470
2471 void InitState() {
2472 InitImageAndView();
2473 }
2474
2475 void InitBeginInfo() {
2476 render_pass_begin = lvl_init_struct<VkRenderPassBeginInfo>();
2477 render_pass_begin.renderArea = {{0, 0}, {width, height}};
John Zulauf2f5947d2022-07-27 15:36:31 -06002478 render_pass_begin.renderPass = render_pass->handle();
John Zulauf01a49ee2022-07-13 11:37:08 -06002479 render_pass_begin.framebuffer = framebuffer->handle();
2480
2481        // Simplistically ensure there are enough clear colors, if not provided
2482        // TODO: Should eventually be smart enough to fill in color/depth as appropriate
2483 VkClearValue fill_in;
2484 fill_in.color = ccv;
2485 for (size_t i = clear_colors.size(); i < attachments.size(); ++i) {
2486 clear_colors.push_back(fill_in);
2487 }
2488 render_pass_begin.clearValueCount = static_cast<uint32_t>(clear_colors.size());
2489 render_pass_begin.pClearValues = clear_colors.data();
2490 }
2491
John Zulauf2f5947d2022-07-27 15:36:31 -06002492 void InitPipelineHelper(CreatePipelineHelper& g_pipe) {
2493 g_pipe.InitInfo();
2494 g_pipe.ResetShaderInfo(bindStateVertShaderText, bindStateFragSubpassLoadInputText);
2495 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2496 g_pipe.gp_ci_.renderPass = render_pass->handle();
2497 g_pipe.InitState();
2498 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2499 }
2500
John Zulauf01a49ee2022-07-13 11:37:08 -06002501 void Init() {
2502 InitState();
2503 InitRenderPass();
2504 InitFramebuffer();
2505 InitBeginInfo();
2506 }
2507};
2508
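// Bundles the sampler, shaders, and pipeline-helper setup used to read the input attachment in the tests below.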
2509struct SyncTestPipeline {
2510 VkLayerTest& test;
2511 VkRenderPass rp;
2512 CreatePipelineHelper g_pipe;
2513 VkShaderObj vs;
2514 VkShaderObj fs;
2515 VkSamplerCreateInfo sampler_info;
2516 vk_testing::Sampler sampler;
2517 VkImageView view_input = VK_NULL_HANDLE;
2518 SyncTestPipeline(VkLayerTest& test_, VkRenderPass rp_)
2519 : test(test_),
2520 rp(rp_),
2521 g_pipe(test),
2522 vs(&test, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT),
2523 fs(&test, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT),
2524 sampler_info(SafeSaneSamplerCreateInfo()),
2525 sampler() {}
2526 void InitState() {
2528        sampler.init(*test.DeviceObj(), sampler_info);
2529 g_pipe.InitInfo();
2530 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2531 g_pipe.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
2532 g_pipe.gp_ci_.renderPass = rp;
2533 g_pipe.InitState();
2534 }
2535 void Init() {
2536 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
2537 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, view_input, sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
2538 g_pipe.descriptor_set_->UpdateDescriptorSets();
2539 }
2540};
2541
Jeremy Gebben170781d2020-11-19 16:21:21 -07002542TEST_F(VkSyncValTest, SyncLayoutTransition) {
2543 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2544 ASSERT_NO_FATAL_FAILURE(InitState());
2545 if (IsPlatform(kNexusPlayer)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002546 GTEST_SKIP() << "This test should not run on Nexus Player";
Jeremy Gebben170781d2020-11-19 16:21:21 -07002547 }
2548
John Zulauf01a49ee2022-07-13 11:37:08 -06002549 CreateRenderPassHelper rp_helper(m_device);
2550 rp_helper.Init();
2551 const VkImage image_input_handle = rp_helper.image_input->handle();
John Zulauf2f5947d2022-07-27 15:36:31 -06002552 const VkRenderPass rp = rp_helper.render_pass->handle();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002553
John Zulauf01a49ee2022-07-13 11:37:08 -06002554 SyncTestPipeline st_pipe(*this, rp);
2555 st_pipe.InitState();
2556 st_pipe.view_input = rp_helper.view_input;
2557 st_pipe.Init();
2558 const auto& g_pipe = st_pipe.g_pipe;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002559
2560 m_commandBuffer->begin();
2561 auto cb = m_commandBuffer->handle();
2562 VkClearColorValue ccv = {};
2563 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2564
2565 const VkImageMemoryBarrier preClearBarrier = {
2566 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, 0, 0, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED,
John Zulauf01a49ee2022-07-13 11:37:08 -06002567 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 0, 0, image_input_handle, full_subresource_range,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002568 };
2569 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2570 &preClearBarrier);
2571
John Zulauf01a49ee2022-07-13 11:37:08 -06002572 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002573 &full_subresource_range);
2574
2575 const VkImageMemoryBarrier postClearBarrier = {
2576 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2577 0,
2578 VK_ACCESS_TRANSFER_WRITE_BIT,
John Zulauffa44ab22022-07-14 15:12:28 -06002579 VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002580 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2581 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2582 0,
2583 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002584 image_input_handle,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002585 full_subresource_range,
2586 };
John Zulauffa44ab22022-07-14 15:12:28 -06002587 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT,
2588 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0u, 0u, nullptr,
2589 0u, nullptr, 1u, &postClearBarrier);
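    // The barrier above makes the clear (transfer write) visible to the subsequent render pass accesses:
    // the attachment load and the input-attachment read in the fragment shader.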
Jeremy Gebben170781d2020-11-19 16:21:21 -07002590
John Zulauf01a49ee2022-07-13 11:37:08 -06002591 m_commandBuffer->BeginRenderPass(rp_helper.render_pass_begin);
John Zulauffa44ab22022-07-14 15:12:28 -06002592 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2593 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2594 &g_pipe.descriptor_set_->set_, 0, nullptr);
2595
2596 // Positive test for ordering rules between load and input attachment usage
2597 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2598
2599 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2600 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002601
2602 // Catch a conflict with the input attachment final layout transition
2603 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002604 vk::CmdClearColorImage(m_commandBuffer->handle(), image_input_handle, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002605 &full_subresource_range);
2606 m_errorMonitor->VerifyFound();
John Zulaufe972b752021-05-04 15:47:17 -06002607
2608 // There should be no hazard for ILT after ILT
John Zulaufe972b752021-05-04 15:47:17 -06002609 m_commandBuffer->end();
2610 m_commandBuffer->reset();
2611 m_commandBuffer->begin();
2612 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2613 &preClearBarrier);
2614 const VkImageMemoryBarrier wawBarrier = {
2615 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2616 0,
2617 VK_ACCESS_SHADER_READ_BIT,
2618 VK_ACCESS_SHADER_READ_BIT,
2619 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2620 VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
2621 0,
2622 0,
John Zulauf01a49ee2022-07-13 11:37:08 -06002623 image_input_handle,
John Zulaufe972b752021-05-04 15:47:17 -06002624 full_subresource_range,
2625 };
John Zulaufe972b752021-05-04 15:47:17 -06002626 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0u, 0u, nullptr, 0u,
2627 nullptr, 1u, &wawBarrier);
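    // Back-to-back layout transitions on the same image: the second barrier is ordered after the first by the
    // execution dependency, so no hazard should be reported for this command buffer.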
John Zulaufe972b752021-05-04 15:47:17 -06002628 m_commandBuffer->end();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002629}
2630
2631TEST_F(VkSyncValTest, SyncSubpassMultiDep) {
2632 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2633 ASSERT_NO_FATAL_FAILURE(InitState());
2634 if (IsPlatform(kNexusPlayer)) {
2635 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
2636 return;
2637 }
2638
John Zulauf01a49ee2022-07-13 11:37:08 -06002639 CreateRenderPassHelper rp_helper_positive(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002640
Jeremy Gebben170781d2020-11-19 16:21:21 -07002641 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
2642 VkImageSubresourceLayers mip_0_layer_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
2643 VkOffset3D image_zero{0, 0, 0};
John Zulauf01a49ee2022-07-13 11:37:08 -06002644 VkExtent3D image_size{rp_helper_positive.width, rp_helper_positive.height, 1};
2645
Jeremy Gebben170781d2020-11-19 16:21:21 -07002646 VkImageCopy full_region{mip_0_layer_0, image_zero, mip_0_layer_0, image_zero, image_size};
2647
John Zulauf01a49ee2022-07-13 11:37:08 -06002648 rp_helper_positive.InitState();
John Zulauf2f5947d2022-07-27 15:36:31 -06002649 rp_helper_positive.InitAllAttachmentsToLayoutGeneral();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002650
John Zulauf01a49ee2022-07-13 11:37:08 -06002651    // Copy the common state to the other renderpass helper
2652 CreateRenderPassHelper rp_helper_negative(m_device);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002653
John Zulauf01a49ee2022-07-13 11:37:08 -06002654 auto& subpass_dep_positive = rp_helper_positive.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002655
John Zulauf01a49ee2022-07-13 11:37:08 -06002656 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2657 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2658 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2659 subpass_dep_positive.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2660 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
2661 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
2662 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2663 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2664 VK_ACCESS_TRANSFER_READ_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
2665 subpass_dep_positive.push_back({0, VK_SUBPASS_EXTERNAL, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2666 VK_PIPELINE_STAGE_TRANSFER_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2667 VK_ACCESS_TRANSFER_WRITE_BIT, VK_DEPENDENCY_VIEW_LOCAL_BIT});
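    // Four dependencies: two external->subpass 0 covering the pre-render-pass transfer writes vs. the color
    // attachment accesses, and two subpass 0->external covering the color attachment writes vs. the transfer
    // read/write issued after EndRenderPass.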
Jeremy Gebben170781d2020-11-19 16:21:21 -07002668
John Zulauf01a49ee2022-07-13 11:37:08 -06002669 rp_helper_positive.InitRenderPass();
2670 rp_helper_positive.InitFramebuffer();
2671 rp_helper_positive.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002672
John Zulauf01a49ee2022-07-13 11:37:08 -06002673 auto& subpass_dep_negative = rp_helper_negative.subpass_dep;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002674 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2675 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2676 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2677 // Show that the two barriers do *not* chain by breaking the positive barrier into two bits.
2678 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
2679 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT, 0,
2680 VK_DEPENDENCY_VIEW_LOCAL_BIT});
2681 subpass_dep_negative.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2682 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
2683 VK_DEPENDENCY_VIEW_LOCAL_BIT});
John Zulauf01a49ee2022-07-13 11:37:08 -06002684 rp_helper_negative.InitRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002685
John Zulauf01a49ee2022-07-13 11:37:08 -06002686    // Negative and positive RPs are compatible, so they can share the framebuffer.
2687 rp_helper_negative.framebuffer = rp_helper_positive.framebuffer;
2688 rp_helper_negative.InitBeginInfo();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002689
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002690 vk_testing::Sampler sampler;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002691 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002692 sampler.init(*m_device, sampler_info);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002693
Jeremy Gebben170781d2020-11-19 16:21:21 -07002694
2695 CreatePipelineHelper g_pipe(*this);
John Zulauf2f5947d2022-07-27 15:36:31 -06002696 rp_helper_positive.InitPipelineHelper(g_pipe);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002697
John Zulauf2f5947d2022-07-27 15:36:31 -06002698 g_pipe.descriptor_set_->WriteDescriptorImageInfo(0, rp_helper_positive.view_input, VK_NULL_HANDLE,
John Zulauf01a49ee2022-07-13 11:37:08 -06002699 VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002700 g_pipe.descriptor_set_->UpdateDescriptorSets();
2701
2702 m_commandBuffer->begin();
2703 auto cb = m_commandBuffer->handle();
2704 VkClearColorValue ccv = {};
2705
2706 const VkImageMemoryBarrier xferDestBarrier = {VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2707 nullptr,
2708 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
2709 VK_ACCESS_TRANSFER_WRITE_BIT,
2710 VK_IMAGE_LAYOUT_GENERAL,
2711 VK_IMAGE_LAYOUT_GENERAL,
2712 VK_QUEUE_FAMILY_IGNORED,
2713 VK_QUEUE_FAMILY_IGNORED,
2714 VK_NULL_HANDLE,
2715 full_subresource_range};
2716 const VkImageMemoryBarrier xferDestToSrcBarrier = {
2717 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2718 nullptr,
2719 VK_ACCESS_TRANSFER_WRITE_BIT,
2720 VK_ACCESS_TRANSFER_READ_BIT,
2721 VK_IMAGE_LAYOUT_GENERAL,
2722 VK_IMAGE_LAYOUT_GENERAL,
2723 VK_QUEUE_FAMILY_IGNORED,
2724 VK_QUEUE_FAMILY_IGNORED,
2725 VK_NULL_HANDLE,
2726 full_subresource_range,
2727 };
2728
John Zulauf01a49ee2022-07-13 11:37:08 -06002729 const VkImage image_color = rp_helper_positive.image_color->handle();
2730 const VkImage image_input = rp_helper_positive.image_input->handle();
2731
Jeremy Gebben170781d2020-11-19 16:21:21 -07002732 VkImageMemoryBarrier preClearBarrier = xferDestBarrier;
John Zulauf01a49ee2022-07-13 11:37:08 -06002733 preClearBarrier.image = image_color;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002734
2735 VkImageMemoryBarrier preCopyBarriers[2] = {xferDestToSrcBarrier, xferDestBarrier};
John Zulauf01a49ee2022-07-13 11:37:08 -06002736 preCopyBarriers[0].image = image_color;
2737 preCopyBarriers[1].image = image_input;
Jeremy Gebben170781d2020-11-19 16:21:21 -07002738 // Positive test for ordering rules between load and input attachment usage
Jeremy Gebben170781d2020-11-19 16:21:21 -07002739
2740 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 1u,
2741 &preClearBarrier);
2742
John Zulauf01a49ee2022-07-13 11:37:08 -06002743 vk::CmdClearColorImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, &ccv, 1, &full_subresource_range);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002744
2745 vk::CmdPipelineBarrier(cb, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, nullptr, 0u, nullptr, 2u,
2746 preCopyBarriers);
2747
John Zulauf01a49ee2022-07-13 11:37:08 -06002748 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input, VK_IMAGE_LAYOUT_GENERAL, 1u,
2749 &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002750
2751    // No post-copy image barrier; we are testing the subpass dependencies
2752
Jeremy Gebben170781d2020-11-19 16:21:21 -07002753    // Positive renderpass multi-dependency test
John Zulauf01a49ee2022-07-13 11:37:08 -06002754 m_commandBuffer->BeginRenderPass(rp_helper_positive.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002755 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
2756 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
2757 &g_pipe.descriptor_set_->set_, 0, nullptr);
2758
2759 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
2760
2761 // Positive test for store ordering vs. input attachment and dependency *to* external for layout transition
2762 m_commandBuffer->EndRenderPass();
Jeremy Gebben170781d2020-11-19 16:21:21 -07002763
John Zulauf01a49ee2022-07-13 11:37:08 -06002764 vk::CmdCopyImage(m_commandBuffer->handle(), image_color, VK_IMAGE_LAYOUT_GENERAL, image_input,
Jeremy Gebben170781d2020-11-19 16:21:21 -07002765 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &full_region);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002766
Jeremy Gebben170781d2020-11-19 16:21:21 -07002767    // Renderpass multi-dependency test: the hazard below fires IFF the dependencies are acting independently (i.e. do not chain).
2768 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-READ_AFTER_WRITE");
John Zulauf01a49ee2022-07-13 11:37:08 -06002769 m_commandBuffer->BeginRenderPass(rp_helper_negative.render_pass_begin);
Jeremy Gebben170781d2020-11-19 16:21:21 -07002770 m_errorMonitor->VerifyFound();
2771}
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002772
2773TEST_F(VkSyncValTest, RenderPassAsyncHazard) {
2774 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
2775 ASSERT_NO_FATAL_FAILURE(InitState());
2776
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002777 if (IsPlatform(kPixel3) || IsPlatform(kPixel3aXL)) {
Nathaniel Cesario0d50bcf2022-06-21 10:30:04 -06002778 GTEST_SKIP() << "Temporarily disabling on Pixel 3 and Pixel 3a XL due to driver crash";
Nathaniel Cesariof9cd1a82021-07-24 08:48:55 -06002779 }
2780
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002781 // overall set up:
2782 // subpass 0:
2783 // write image 0
2784 // subpass 1:
2785 // read image 0
2786 // write image 1
2787 // subpass 2:
2788 // read image 0
2789 // write image 2
2790 // subpass 3:
2791 // read image 0
2792 // write image 3
2793 //
2794 // subpasses 1 & 2 can run in parallel but both should depend on 0
2795 // subpass 3 must run after 1 & 2 because otherwise the store operation will
2796 // race with the reads in the other subpasses.
2797
2798 constexpr VkFormat kFormat = VK_FORMAT_R8G8B8A8_UNORM;
2799 constexpr uint32_t kWidth = 32, kHeight = 32;
2800 constexpr uint32_t kNumImages = 4;
2801
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002802 VkImageCreateInfo src_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002803 src_img_info.flags = 0;
2804 src_img_info.imageType = VK_IMAGE_TYPE_2D;
2805 src_img_info.format = kFormat;
2806 src_img_info.extent = {kWidth, kHeight, 1};
2807 src_img_info.mipLevels = 1;
2808 src_img_info.arrayLayers = 1;
2809 src_img_info.samples = VK_SAMPLE_COUNT_2_BIT;
2810 src_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2811 src_img_info.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
2812 src_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2813 src_img_info.queueFamilyIndexCount = 0;
2814 src_img_info.pQueueFamilyIndices = nullptr;
2815 src_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2816
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002817 VkImageCreateInfo dst_img_info = LvlInitStruct<VkImageCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002818 dst_img_info.flags = 0;
2819 dst_img_info.imageType = VK_IMAGE_TYPE_2D;
2820 dst_img_info.format = kFormat;
2821 dst_img_info.extent = {kWidth, kHeight, 1};
2822 dst_img_info.mipLevels = 1;
2823 dst_img_info.arrayLayers = 1;
2824 dst_img_info.samples = VK_SAMPLE_COUNT_1_BIT;
2825 dst_img_info.tiling = VK_IMAGE_TILING_OPTIMAL;
2826 dst_img_info.usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2827 dst_img_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
2828 dst_img_info.queueFamilyIndexCount = 0;
2829 dst_img_info.pQueueFamilyIndices = nullptr;
2830 dst_img_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2831
2832 std::vector<std::unique_ptr<VkImageObj>> images;
2833 for (uint32_t i = 0; i < kNumImages; i++) {
2834 images.emplace_back(new VkImageObj(m_device));
2835 }
2836 images[0]->Init(src_img_info);
2837 for (uint32_t i = 1; i < images.size(); i++) {
2838 images[i]->Init(dst_img_info);
2839 }
2840
2841 std::array<VkImageView, kNumImages> attachments{};
2842 std::array<VkAttachmentDescription, kNumImages> attachment_descriptions{};
2843 std::array<VkAttachmentReference, kNumImages> color_refs{};
2844 std::array<VkImageMemoryBarrier, kNumImages> img_barriers{};
2845
2846 for (uint32_t i = 0; i < attachments.size(); i++) {
2847 attachments[i] = images[i]->targetView(kFormat);
2848 attachment_descriptions[i] = {};
2849 attachment_descriptions[i].flags = 0;
2850 attachment_descriptions[i].format = kFormat;
2851 attachment_descriptions[i].samples = VK_SAMPLE_COUNT_1_BIT;
2852 attachment_descriptions[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
2853 attachment_descriptions[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
2854 attachment_descriptions[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2855 attachment_descriptions[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
2856 attachment_descriptions[i].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2857 attachment_descriptions[i].finalLayout =
2858 (i == 0) ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2859
2860 color_refs[i] = {i, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
2861
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002862 img_barriers[i] = LvlInitStruct<VkImageMemoryBarrier>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002863 img_barriers[i].srcAccessMask = 0;
2864 img_barriers[i].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2865 img_barriers[i].oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2866 img_barriers[i].newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2867 img_barriers[i].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2868 img_barriers[i].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2869 img_barriers[i].image = images[i]->handle();
2870 img_barriers[i].subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, VK_REMAINING_MIP_LEVELS, 0, VK_REMAINING_ARRAY_LAYERS};
2871 }
2872
2873 const VkAttachmentReference input_ref{0u, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL};
2874
2875 std::array<std::array<uint32_t, 2>, kNumImages - 1> preserve_subpass{{{2, 3}, {1, 3}, {1, 2}}};
2876
2877 std::array<VkSubpassDescription, kNumImages> subpasses{};
2878
2879 subpasses[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2880 subpasses[0].inputAttachmentCount = 0;
2881 subpasses[0].pInputAttachments = nullptr;
2882 subpasses[0].colorAttachmentCount = 1;
2883 subpasses[0].pColorAttachments = &color_refs[0];
2884
2885 for (uint32_t i = 1; i < subpasses.size(); i++) {
2886 subpasses[i].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
2887 subpasses[i].inputAttachmentCount = 1;
2888 subpasses[i].pInputAttachments = &input_ref;
2889 subpasses[i].colorAttachmentCount = 1;
2890 subpasses[i].pColorAttachments = &color_refs[1];
2891 subpasses[i].preserveAttachmentCount = preserve_subpass[i - 1].size();
2892 subpasses[i].pPreserveAttachments = preserve_subpass[i - 1].data();
2893 }
2894
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002895 VkRenderPassCreateInfo renderpass_info = LvlInitStruct<VkRenderPassCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002896 renderpass_info.flags = 0;
2897 renderpass_info.attachmentCount = attachment_descriptions.size();
2898 renderpass_info.pAttachments = attachment_descriptions.data();
2899 renderpass_info.subpassCount = subpasses.size();
2900 renderpass_info.pSubpasses = subpasses.data();
2901 renderpass_info.dependencyCount = 0;
2902 renderpass_info.pDependencies = nullptr;
2903
sfricke-samsung6fc3e322022-02-15 22:41:29 -08002904 VkFramebufferCreateInfo fbci = LvlInitStruct<VkFramebufferCreateInfo>();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002905 fbci.flags = 0;
2906 fbci.attachmentCount = attachments.size();
2907 fbci.pAttachments = attachments.data();
2908 fbci.width = kWidth;
2909 fbci.height = kHeight;
2910 fbci.layers = 1;
2911
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002912 vk_testing::Sampler sampler;
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002913 VkSamplerCreateInfo sampler_info = SafeSaneSamplerCreateInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002914 sampler.init(*m_device, sampler_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002915
sfricke-samsungae54c1e2022-01-21 05:35:21 -08002916 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
John Zulauf01a49ee2022-07-13 11:37:08 -06002917 VkShaderObj fs(this, bindStateFragSubpassLoadInputText, VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002918
2919 VkClearValue clear = {};
2920 clear.color = m_clear_color;
Tony-LunarG73f37032021-06-07 11:47:03 -06002921 std::array<VkClearValue, 4> clear_values = {{clear, clear, clear, clear}};
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002922
2923 // run the renderpass with no dependencies
2924 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002925 vk_testing::RenderPass rp;
2926 vk_testing::Framebuffer fb;
2927 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002928
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002929 fbci.renderPass = rp.handle();
2930 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002931
2932 CreatePipelineHelper g_pipe_0(*this);
2933 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002934 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002935 g_pipe_0.InitState();
2936 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
2937
2938 CreatePipelineHelper g_pipe_12(*this);
2939 g_pipe_12.InitInfo();
2940 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
2941 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002942 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002943 g_pipe_12.InitState();
2944 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
2945
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002946 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002947 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
2948
2949 m_commandBuffer->begin();
2950
2951 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
2952 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
2953 img_barriers.data());
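        // Transition all four attachments to COLOR_ATTACHMENT_OPTIMAL before the render pass; the hazards being
        // tested are between the subpasses themselves, not against these initialization barriers.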
2954
2955 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
2956 m_renderPassBeginInfo.pClearValues = clear_values.data();
2957 m_renderPassBeginInfo.clearValueCount = clear_values.size();
2958
2959 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06002960 m_renderPassBeginInfo.renderPass = rp.handle();
2961 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002962
2963 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
2964 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
2965 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
2966 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
2967
2968 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2969
2970 for (uint32_t i = 1; i < subpasses.size(); i++) {
2971 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
2972 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
2973 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
2974 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
2975
2976 // we're racing the writes from subpass 0 with our shader reads
2977 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ-RACING-WRITE");
2978 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
2979 m_errorMonitor->VerifyFound();
2980 }
2981
2982 // we should get an error from async checking in both subpasses 2 & 3
2983 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
2984 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
2985 vk::CmdEndRenderPass(m_commandBuffer->handle());
2986 m_errorMonitor->VerifyFound();
2987
2988 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07002989 }
2990
2991 // add dependencies from subpass 0 to the others, which are necessary but not sufficient
2992 std::vector<VkSubpassDependency> subpass_dependencies;
2993 for (uint32_t i = 1; i < subpasses.size(); i++) {
2994 VkSubpassDependency dep{0,
2995 i,
2996 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
2997 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
2998 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
2999 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
3000 0};
3001 subpass_dependencies.push_back(dep);
3002 }
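    // Each dependency orders subpass 0's color attachment writes before the input-attachment reads in subpasses
    // 1..3, but nothing yet orders subpasses 1..3 against each other, so the store-op races remain.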
3003 renderpass_info.dependencyCount = subpass_dependencies.size();
3004 renderpass_info.pDependencies = subpass_dependencies.data();
3005
3006 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003007 vk_testing::RenderPass rp;
3008 vk_testing::Framebuffer fb;
3009 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003010
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003011 fbci.renderPass = rp.handle();
3012 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003013
3014 CreatePipelineHelper g_pipe_0(*this);
3015 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003016 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003017 g_pipe_0.InitState();
3018 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3019
3020 CreatePipelineHelper g_pipe_12(*this);
3021 g_pipe_12.InitInfo();
3022 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3023 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003024 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003025 g_pipe_12.InitState();
3026 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3027
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003028 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003029 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3030
3031 m_commandBuffer->begin();
3032
3033 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3034 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3035 img_barriers.data());
3036
3037 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3038 m_renderPassBeginInfo.pClearValues = clear_values.data();
3039 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3040
3041 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003042 m_renderPassBeginInfo.renderPass = rp.handle();
3043 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003044
3045 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3046 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3047 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3048 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3049
3050 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3051
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003052 for (uint32_t i = 1; i < subpasses.size(); i++) {
3053 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3054 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3055 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3056 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
3057 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3058 }
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003059 // expect this error because 2 subpasses could try to do the store operation
3060 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-WRITE");
3061 // ... and this one because the store could happen during a shader read from another subpass
3062 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
3063 vk::CmdEndRenderPass(m_commandBuffer->handle());
3064 m_errorMonitor->VerifyFound();
3065
3066 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003067 }
3068
3069 // try again with correct dependencies to make subpass 3 depend on 1 & 2
3070 for (uint32_t i = 1; i < (subpasses.size() - 1); i++) {
3071 VkSubpassDependency dep{i,
3072 static_cast<uint32_t>(subpasses.size() - 1),
3073 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
3074 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
3075 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
3076 VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
3077 0};
3078 subpass_dependencies.push_back(dep);
3079 }
3080 renderpass_info.dependencyCount = subpass_dependencies.size();
3081 renderpass_info.pDependencies = subpass_dependencies.data();
3082 {
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003083 vk_testing::RenderPass rp;
3084 vk_testing::Framebuffer fb;
3085 rp.init(*m_device, renderpass_info);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003086
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003087 fbci.renderPass = rp.handle();
3088 fb.init(*m_device, fbci);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003089
3090 CreatePipelineHelper g_pipe_0(*this);
3091 g_pipe_0.InitInfo();
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003092 g_pipe_0.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003093 g_pipe_0.InitState();
3094 ASSERT_VK_SUCCESS(g_pipe_0.CreateGraphicsPipeline());
3095
3096 CreatePipelineHelper g_pipe_12(*this);
3097 g_pipe_12.InitInfo();
3098 g_pipe_12.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
3099 g_pipe_12.dsl_bindings_ = {{0, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003100 g_pipe_12.gp_ci_.renderPass = rp.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003101 g_pipe_12.InitState();
3102 ASSERT_VK_SUCCESS(g_pipe_12.CreateGraphicsPipeline());
3103
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003104 g_pipe_12.descriptor_set_->WriteDescriptorImageInfo(0, attachments[0], sampler.handle(), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003105 g_pipe_12.descriptor_set_->UpdateDescriptorSets();
3106
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003107 m_commandBuffer->begin();
3108 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
3109 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, nullptr, 0, nullptr, img_barriers.size(),
3110 img_barriers.data());
3111
3112 m_renderPassBeginInfo.renderArea = {{0, 0}, {16, 16}};
3113 m_renderPassBeginInfo.pClearValues = clear_values.data();
3114 m_renderPassBeginInfo.clearValueCount = clear_values.size();
3115
3116 m_renderPassBeginInfo.renderArea = {{0, 0}, {kWidth, kHeight}};
Jeremy Gebben18ac1052021-08-12 11:07:32 -06003117 m_renderPassBeginInfo.renderPass = rp.handle();
3118 m_renderPassBeginInfo.framebuffer = fb.handle();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003119
3120 vk::CmdBeginRenderPass(m_commandBuffer->handle(), &m_renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
3121 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_);
3122 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_0.pipeline_layout_.handle(), 0,
3123 1, &g_pipe_0.descriptor_set_->set_, 0, NULL);
3124
3125 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3126
3127 for (uint32_t i = 1; i < subpasses.size(); i++) {
3128 vk::CmdNextSubpass(m_commandBuffer->handle(), VK_SUBPASS_CONTENTS_INLINE);
3129 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe_12.pipeline_);
3130 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS,
3131 g_pipe_12.pipeline_layout_.handle(), 0, 1, &g_pipe_12.descriptor_set_->set_, 0, NULL);
3132 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3133 }
3134
3135 vk::CmdEndRenderPass(m_commandBuffer->handle());
3136
3137 m_commandBuffer->end();
Jeremy Gebben6feafd42020-11-30 09:11:38 -07003138 }
3139}
John Zulauf025ee442020-12-15 11:44:19 -07003140
3141TEST_F(VkSyncValTest, SyncEventsBufferCopy) {
3142 TEST_DESCRIPTION("Check Set/Wait protection for a variety of use cases using buffer copies");
3143 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3144 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3145
3146 VkBufferObj buffer_a;
3147 VkBufferObj buffer_b;
3148 VkBufferObj buffer_c;
3149 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3150 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3151 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3152 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
3153
3154 VkBufferCopy region = {0, 0, 256};
3155 VkBufferCopy front2front = {0, 0, 128};
3156 VkBufferCopy front2back = {0, 128, 128};
3157 VkBufferCopy back2back = {128, 128, 128};
3158
3159 VkEventObj event;
3160 event.init(*m_device, VkEventObj::create_info(0));
3161 VkEvent event_handle = event.handle();
3162
3163 auto cb = m_commandBuffer->handle();
3164 m_commandBuffer->begin();
3165
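    // vkCmdSetEvent / vkCmdWaitEvents form a split barrier: only accesses recorded *before* the
    // SetEvent are in the wait's first synchronization scope, so work recorded between the set
    // and the wait is not protected by the wait.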
3166     // Copy after set for WAR (note we are writing to the back half of c but only reading from the front)
John Zulauf025ee442020-12-15 11:44:19 -07003167 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3168 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3169 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_c.handle(), 1, &back2back);
3170 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3171 nullptr, 0, nullptr);
3172 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003173 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3174 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &front2back);
3175 m_errorMonitor->VerifyFound();
3176 m_commandBuffer->end();
3177
3178 // WAR prevented
3179 m_commandBuffer->reset();
3180 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003181 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3182 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3183     // Just protect against WAR; an execution-only sync barrier (no memory access masks) is enough.
3184 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3185 nullptr, 0, nullptr);
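    // The WAR on buffer_a (read by the first copy, written by the copy below) only needs the
    // execution dependency the wait provides; no memory barrier is required to order a read
    // before a later write.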
3186 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003187
3188 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
3189 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3190 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3191 m_errorMonitor->VerifyFound();
3192 m_commandBuffer->end();
3193
3194 // Prevent WAR and WAW
3195 m_commandBuffer->reset();
3196 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003197 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3198 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003199 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003200 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3201 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3202 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3203 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3204 // The WAW should be safe (on a memory barrier)
3205 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_b.handle(), 1, &region);
3206 // The WAR should also be safe (on a sync barrier)
3207 vk::CmdCopyBuffer(cb, buffer_c.handle(), buffer_a.handle(), 1, &region);
John Zulauf025ee442020-12-15 11:44:19 -07003208 m_commandBuffer->end();
3209
3210 // Barrier range check for WAW
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003211 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003212 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3213 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3214 buffer_barrier_front_waw.buffer = buffer_b.handle();
3215 buffer_barrier_front_waw.offset = front2front.dstOffset;
3216 buffer_barrier_front_waw.size = front2front.size;
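    // The barrier deliberately covers only the first 128 bytes of buffer_b, so a second write to
    // the front half is protected while a write to the back half still hazards.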
3217
3218 // Front safe, back WAW
3219 m_commandBuffer->reset();
3220 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003221 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &region);
3222 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3223 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3224 &buffer_barrier_front_waw, 0, nullptr);
3225 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulauf025ee442020-12-15 11:44:19 -07003226 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3227 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &back2back);
3228 m_errorMonitor->VerifyFound();
3229 m_commandBuffer->end();
3230}
3231
3232TEST_F(VkSyncValTest, SyncEventsCopyImageHazards) {
3233 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3234 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3235
3236 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3237 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3238 VkImageObj image_a(m_device);
3239 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 2, format, usage, VK_IMAGE_TILING_OPTIMAL);
3240 image_a.Init(image_ci);
3241 ASSERT_TRUE(image_a.initialized());
3242
3243 VkImageObj image_b(m_device);
3244 image_b.Init(image_ci);
3245 ASSERT_TRUE(image_b.initialized());
3246
3247 VkImageObj image_c(m_device);
3248 image_c.Init(image_ci);
3249 ASSERT_TRUE(image_c.initialized());
3250
3251 VkEventObj event;
3252 event.init(*m_device, VkEventObj::create_info(0));
3253 VkEvent event_handle = event.handle();
3254
3255 VkImageSubresourceLayers layers_all{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 2};
3256 VkImageSubresourceLayers layers_0{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
3257 VkImageSubresourceLayers layers_1{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 1};
3258 VkImageSubresourceRange layers_0_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
3259 VkOffset3D zero_offset{0, 0, 0};
3260 VkOffset3D half_offset{64, 64, 0};
3261 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
3262 VkExtent3D half_extent{64, 64, 1}; // <-- image type is 2D
3263
3264 VkImageCopy full_region = {layers_all, zero_offset, layers_all, zero_offset, full_extent};
3265 VkImageCopy region_0_to_0 = {layers_0, zero_offset, layers_0, zero_offset, full_extent};
3266 VkImageCopy region_1_to_1 = {layers_1, zero_offset, layers_1, zero_offset, full_extent};
3267 VkImageCopy region_0_q0toq0 = {layers_0, zero_offset, layers_0, zero_offset, half_extent};
3268 VkImageCopy region_0_q0toq3 = {layers_0, zero_offset, layers_0, half_offset, half_extent};
3269 VkImageCopy region_0_q3toq3 = {layers_0, half_offset, layers_0, half_offset, half_extent};
3270
3271 auto cb = m_commandBuffer->handle();
3272 auto copy_general = [cb](const VkImageObj &from, const VkImageObj &to, const VkImageCopy &region) {
3273 vk::CmdCopyImage(cb, from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1, &region);
3274 };
3275
3276 auto set_layouts = [this, &image_a, &image_b, &image_c]() {
3277 image_c.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3278 image_b.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3279 image_a.SetLayout(m_commandBuffer, VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
3280 };
3281
John Zulaufdd462092020-12-18 12:00:35 -07003282     // Scope check: one prior access is inside the event's first synchronization scope, one is not.
John Zulauf025ee442020-12-15 11:44:19 -07003283 m_commandBuffer->begin();
3284 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003285 copy_general(image_a, image_b, full_region);
3286 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3287 copy_general(image_a, image_c, region_0_q3toq3);
3288 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3289 nullptr, 0, nullptr);
3290 copy_general(image_c, image_a, region_0_q0toq0);
John Zulauf025ee442020-12-15 11:44:19 -07003291 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
3292 copy_general(image_c, image_a, region_0_q0toq3);
3293 m_errorMonitor->VerifyFound();
3294 m_commandBuffer->end();
3295
3296 // WAR prevented
3297 m_commandBuffer->reset();
3298 m_commandBuffer->begin();
3299 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003300 copy_general(image_a, image_b, full_region);
3301 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3302     // Just protect against WAR; an execution-only sync barrier (no memory access masks) is enough.
3303 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3304 nullptr, 0, nullptr);
3305 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003306
3307 // Wait shouldn't prevent this WAW though, as it's only a synchronization barrier
3308 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3309 copy_general(image_c, image_b, full_region);
3310 m_errorMonitor->VerifyFound();
3311 m_commandBuffer->end();
3312
3313 // Prevent WAR and WAW
3314 m_commandBuffer->reset();
3315 m_commandBuffer->begin();
John Zulauf025ee442020-12-15 11:44:19 -07003316 set_layouts();
3317 copy_general(image_a, image_b, full_region);
3318 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003319 auto mem_barrier_waw = LvlInitStruct<VkMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003320 mem_barrier_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3321 mem_barrier_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3322 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 1,
3323 &mem_barrier_waw, 0, nullptr, 0, nullptr);
3324 // The WAW should be safe (on a memory barrier)
3325 copy_general(image_c, image_b, full_region);
3326 // The WAR should also be safe (on a sync barrier)
3327 copy_general(image_c, image_a, full_region);
John Zulauf025ee442020-12-15 11:44:19 -07003328 m_commandBuffer->end();
3329
3330 // Barrier range check for WAW
Mark Lobodzinski07d0a612020-12-30 15:42:31 -07003331 auto image_barrier_region0_waw = LvlInitStruct<VkImageMemoryBarrier>();
John Zulauf025ee442020-12-15 11:44:19 -07003332 image_barrier_region0_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3333 image_barrier_region0_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3334 image_barrier_region0_waw.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
3335 image_barrier_region0_waw.newLayout = VK_IMAGE_LAYOUT_GENERAL;
3336 image_barrier_region0_waw.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3337 image_barrier_region0_waw.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
3338 image_barrier_region0_waw.image = image_b.handle();
3339 image_barrier_region0_waw.subresourceRange = layers_0_subresource_range;
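    // This barrier is limited to array layer 0 of image_b, so the layer-0 copy below is protected
    // while the layer-1 copy still hazards.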
3340
3341     // Region 0 (layer 0) safe, region 1 (layer 1) WAW
3342 m_commandBuffer->reset();
3343 m_commandBuffer->begin();
3344 set_layouts();
John Zulauf025ee442020-12-15 11:44:19 -07003345 copy_general(image_a, image_b, full_region);
3346 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3347 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3348 nullptr, 1, &image_barrier_region0_waw);
3349 copy_general(image_a, image_b, region_0_to_0);
John Zulauf025ee442020-12-15 11:44:19 -07003350 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3351 copy_general(image_a, image_b, region_1_to_1);
3352 m_errorMonitor->VerifyFound();
3353 m_commandBuffer->end();
3354}
John Zulauf4b5e4632020-12-15 11:48:59 -07003355
3356TEST_F(VkSyncValTest, SyncEventsCommandHazards) {
3357 TEST_DESCRIPTION("Check Set/Reset/Wait command hazard checking");
3358 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3359 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3360
3361 VkEventObj event;
3362 event.init(*m_device, VkEventObj::create_info(0));
3363
3364 const VkEvent event_handle = event.handle();
3365
3366 m_commandBuffer->begin();
John Zulauf4b5e4632020-12-15 11:48:59 -07003367 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003368
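    // Waiting on an event that was reset in this command buffer without an intervening execution
    // dependency breaks the reset/wait ordering rule; the error is reported on the wait and uses
    // the vkCmdResetEvent VUID expected below.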
John Zulauf4edde622021-02-15 08:54:50 -07003369 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "VUID-vkCmdResetEvent-event-03834");
John Zulauf4b5e4632020-12-15 11:48:59 -07003370 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3371 nullptr, 0, nullptr);
3372 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003373 m_commandBuffer->end();
3374
3375 m_commandBuffer->begin();
3376 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3377 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, nullptr,
3378 0, nullptr, 0, nullptr);
John Zulauf4b5e4632020-12-15 11:48:59 -07003379 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-wait");
3380 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3381 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003382 m_commandBuffer->end();
3383
3384 m_commandBuffer->begin();
3385 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003386 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-reset");
3387 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3388 m_errorMonitor->VerifyFound();
3389
John Zulauf4b5e4632020-12-15 11:48:59 -07003390 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3391 nullptr, 0, nullptr);
3392 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3393 m_commandBuffer->WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 0,
3394 nullptr, 0, nullptr);
3395 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3396 m_commandBuffer->PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0U, 0, nullptr, 0,
3397 nullptr, 0, nullptr);
3398 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003399
3400 // Need a barrier between set and a reset
3401 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdResetEvent-missingbarrier-set");
3402 m_commandBuffer->ResetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3403 m_errorMonitor->VerifyFound();
John Zulauf4b5e4632020-12-15 11:48:59 -07003404 m_commandBuffer->end();
3405
3406 m_commandBuffer->begin();
3407 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
John Zulauf4b5e4632020-12-15 11:48:59 -07003408 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-vkCmdSetEvent-missingbarrier-set");
3409 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3410 m_errorMonitor->VerifyFound();
3411
3412 m_commandBuffer->end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003413
3414 // Secondary command buffer events tests
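    // Syncval validates the accesses recorded in a secondary command buffer against the primary's
    // access history when vkCmdExecuteCommands is recorded, so the hazard below is reported on the
    // ExecuteCommands call.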
3415 const auto cb = m_commandBuffer->handle();
3416 VkBufferObj buffer_a;
3417 VkBufferObj buffer_b;
3418 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
3419 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
3420 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
3421
3422 VkBufferCopy front2front = {0, 0, 128};
3423
3424 // Barrier range check for WAW
3425 auto buffer_barrier_front_waw = LvlInitStruct<VkBufferMemoryBarrier>();
3426 buffer_barrier_front_waw.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3427 buffer_barrier_front_waw.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
3428 buffer_barrier_front_waw.buffer = buffer_b.handle();
3429 buffer_barrier_front_waw.offset = front2front.dstOffset;
3430 buffer_barrier_front_waw.size = front2front.size;
3431
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003432 VkCommandBufferObj secondary_cb1(m_device, m_commandPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
3433 VkCommandBuffer scb1 = secondary_cb1.handle();
3434 secondary_cb1.begin();
3435 secondary_cb1.WaitEvents(1, &event_handle, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, nullptr, 1,
3436 &buffer_barrier_front_waw, 0, nullptr);
3437 vk::CmdCopyBuffer(scb1, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3438 secondary_cb1.end();
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003439
3440 // One secondary cb hazarding with primary
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003441 m_commandBuffer->reset();
3442 m_commandBuffer->begin();
3443 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003444 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3445 vk::CmdExecuteCommands(cb, 1, &scb1);
3446 m_errorMonitor->VerifyFound();
3447 m_commandBuffer->end();
3448
3449 // One secondary cb sharing event with primary
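    // No hazard is expected here: the SetEvent recorded in the primary is in scope for the
    // secondary's WaitEvents, and the barrier on the front of buffer_b that it carries protects
    // the front2front WAW.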
John Zulaufb0b6e9b2021-08-20 09:22:45 -06003450 m_commandBuffer->reset();
3451 m_commandBuffer->begin();
3452 vk::CmdCopyBuffer(cb, buffer_a.handle(), buffer_b.handle(), 1, &front2front);
3453 m_commandBuffer->SetEvent(event, VK_PIPELINE_STAGE_TRANSFER_BIT);
3454 vk::CmdExecuteCommands(cb, 1, &scb1);
3455 m_commandBuffer->end();
John Zulauf4b5e4632020-12-15 11:48:59 -07003456}
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003457
3458 TEST_F(VkLayerTest, CmdWaitEvents2KHRUsedButSynchronization2Disabled) {
3459 TEST_DESCRIPTION("Using CmdWaitEvents2KHR when synchronization2 is not enabled");
Tony-LunarG53b72e52021-11-19 10:04:40 -07003460 SetTargetApiVersion(VK_API_VERSION_1_3);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003461
Tony-LunarGdf960d42022-01-27 16:13:34 -07003462 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003463 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003464 if (!AreRequiredExtensionsEnabled()) {
3465 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003466 }
sjfricked8e01c52022-07-06 14:09:04 +09003467 ASSERT_NO_FATAL_FAILURE(InitState());
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003468
Tony-LunarG53b72e52021-11-19 10:04:40 -07003469 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003470 auto fpCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWaitEvents2KHR");
3471
3472 VkEventObj event;
3473 event.init(*m_device, VkEventObj::create_info(0));
3474 VkEvent event_handle = event.handle();
3475
3476 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3477
3478 m_commandBuffer->begin();
Tony-LunarG279601c2021-11-16 10:50:51 -07003479 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003480 fpCmdWaitEvents2KHR(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3481 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003482 if (vulkan_13) {
3483 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWaitEvents2-synchronization2-03836");
3484 vk::CmdWaitEvents2(m_commandBuffer->handle(), 1, &event_handle, &dependency_info);
3485 m_errorMonitor->VerifyFound();
3486 }
ziga-lunarg3a16ff12021-07-30 12:09:55 +02003487 m_commandBuffer->end();
3488}
ziga-lunarg15f450d2021-08-26 23:10:05 +02003489
3490TEST_F(VkLayerTest, Sync2FeatureDisabled) {
3491 TEST_DESCRIPTION("Call sync2 functions when the feature is disabled");
3492
Tony-LunarG53b72e52021-11-19 10:04:40 -07003493 SetTargetApiVersion(VK_API_VERSION_1_3);
Tony-LunarGdf960d42022-01-27 16:13:34 -07003494 AddRequiredExtensions(VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003495 ASSERT_NO_FATAL_FAILURE(InitFramework());
sjfricked700bc02022-05-30 16:35:06 +09003496 if (!AreRequiredExtensionsEnabled()) {
3497 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
ziga-lunarg15f450d2021-08-26 23:10:05 +02003498 }
3499
3500 ASSERT_NO_FATAL_FAILURE(InitState());
3501
Tony-LunarG53b72e52021-11-19 10:04:40 -07003502 bool vulkan_13 = (DeviceValidationVersion() >= VK_API_VERSION_1_3);
ziga-lunarg15f450d2021-08-26 23:10:05 +02003503 VkPhysicalDeviceSynchronization2FeaturesKHR synchronization2 = LvlInitStruct<VkPhysicalDeviceSynchronization2FeaturesKHR>();
3504 synchronization2.synchronization2 = VK_FALSE; // Invalid
3505 auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&synchronization2);
3506 vk::GetPhysicalDeviceFeatures2(gpu(), &features2);
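    // The device was already created by InitState() above without enabling synchronization2, so
    // each sync2 entry point recorded below is expected to fail with its
    // ...-synchronization2-... VUID.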
3507
3508 auto vkCmdPipelineBarrier2KHR =
3509 (PFN_vkCmdPipelineBarrier2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPipelineBarrier2KHR");
3510 auto vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdResetEvent2KHR");
3511 auto vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdSetEvent2KHR");
3512 auto vkCmdWriteTimestamp2KHR =
3513 (PFN_vkCmdWriteTimestamp2KHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdWriteTimestamp2KHR");
3514
3515 bool timestamp = false;
3516
3517 uint32_t queue_count;
3518 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, NULL);
3519 std::vector<VkQueueFamilyProperties> queue_props(queue_count);
3520 vk::GetPhysicalDeviceQueueFamilyProperties(gpu(), &queue_count, queue_props.data());
3521 if (queue_props[m_device->graphics_queue_node_index_].timestampValidBits > 0) {
3522 timestamp = true;
3523 }
3524
3525 m_commandBuffer->begin();
3526
3527 VkDependencyInfoKHR dependency_info = LvlInitStruct<VkDependencyInfoKHR>();
3528
Tony-LunarG279601c2021-11-16 10:50:51 -07003529 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003530 vkCmdPipelineBarrier2KHR(m_commandBuffer->handle(), &dependency_info);
3531 m_errorMonitor->VerifyFound();
3532
3533 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
3534 vk_testing::Event event;
3535 event.init(*m_device, eci);
3536
3537 VkPipelineStageFlagBits2KHR stage = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR;
3538
Tony-LunarG279601c2021-11-16 10:50:51 -07003539 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003540 vkCmdResetEvent2KHR(m_commandBuffer->handle(), event.handle(), stage);
3541 m_errorMonitor->VerifyFound();
3542
Tony-LunarG279601c2021-11-16 10:50:51 -07003543 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003544 vkCmdSetEvent2KHR(m_commandBuffer->handle(), event.handle(), &dependency_info);
3545 m_errorMonitor->VerifyFound();
3546
3547 if (timestamp) {
3548 VkQueryPoolCreateInfo qpci = LvlInitStruct<VkQueryPoolCreateInfo>();
3549 qpci.queryType = VK_QUERY_TYPE_TIMESTAMP;
3550 qpci.queryCount = 1;
3551
3552 vk_testing::QueryPool query_pool;
3553 query_pool.init(*m_device, qpci);
3554
Tony-LunarG279601c2021-11-16 10:50:51 -07003555 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
ziga-lunarg15f450d2021-08-26 23:10:05 +02003556 vkCmdWriteTimestamp2KHR(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3557 m_errorMonitor->VerifyFound();
Tony-LunarG53b72e52021-11-19 10:04:40 -07003558 if (vulkan_13) {
3559 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdWriteTimestamp2-synchronization2-03858");
3560 vk::CmdWriteTimestamp2(m_commandBuffer->handle(), stage, query_pool.handle(), 0);
3561 m_errorMonitor->VerifyFound();
3562 }
3563 }
3564 if (vulkan_13) {
3565 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdPipelineBarrier2-synchronization2-03848");
3566 vk::CmdPipelineBarrier2(m_commandBuffer->handle(), &dependency_info);
3567 m_errorMonitor->VerifyFound();
3568
3569 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdResetEvent2-synchronization2-03829");
3570 vk::CmdResetEvent2(m_commandBuffer->handle(), event.handle(), stage);
3571 m_errorMonitor->VerifyFound();
3572
3573 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "VUID-vkCmdSetEvent2-synchronization2-03824");
3574 vk::CmdSetEvent2(m_commandBuffer->handle(), event.handle(), &dependency_info);
3575 m_errorMonitor->VerifyFound();
ziga-lunarg15f450d2021-08-26 23:10:05 +02003576 }
3577
3578 m_commandBuffer->end();
3579}
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003580
3581TEST_F(VkSyncValTest, DestroyedUnusedDescriptors) {
3582 TEST_DESCRIPTION("Verify unused descriptors are ignored and don't crash syncval if they've been destroyed.");
3583 SetTargetApiVersion(VK_API_VERSION_1_1);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003584 AddRequiredExtensions(VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
3585 AddRequiredExtensions(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
3586
3587 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3588
sjfricked700bc02022-05-30 16:35:06 +09003589 if (!AreRequiredExtensionsEnabled()) {
3590 GTEST_SKIP() << RequiredExtensionsNotSupported() << " not supported";
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003591 }
3592
3593 auto indexing_features = LvlInitStruct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
3594 auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>();
3595 features2.pNext = &indexing_features;
3596
3597 auto vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(
3598 vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR"));
3599 ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
3600
3601 vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
3602 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
3603 if (!indexing_features.descriptorBindingPartiallyBound) {
3604 printf("%s Partially bound bindings not supported, skipping test\n", kSkipPrefix);
3605 return;
3606 }
3607 if (!indexing_features.descriptorBindingUpdateUnusedWhilePending) {
3608 printf("%s Updating unused while pending is not supported, skipping test\n", kSkipPrefix);
3609 return;
3610 }
3611
3612 ASSERT_NO_FATAL_FAILURE(InitViewport());
3613 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003614
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003615 VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags =
3616 LvlInitStruct<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003617 constexpr size_t kNumDescriptors = 6;
3618
3619 std::array<VkDescriptorBindingFlagsEXT, kNumDescriptors> ds_binding_flags;
3620 for (auto &elem : ds_binding_flags) {
3621 elem = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT | VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT;
3622 }
3623
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003624 layout_createinfo_binding_flags.bindingCount = ds_binding_flags.size();
3625 layout_createinfo_binding_flags.pBindingFlags = ds_binding_flags.data();
3626
3627 // Prepare descriptors
3628 OneOffDescriptorSet descriptor_set(m_device,
3629 {
3630 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3631 {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3632 {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
3633 {3, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3634 {4, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
3635 {5, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
3636 },
3637 0, &layout_createinfo_binding_flags, 0);
3638 const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
3639 uint32_t qfi = 0;
3640 auto buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3641 buffer_create_info.size = 32;
3642 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
3643 buffer_create_info.queueFamilyIndexCount = 1;
3644 buffer_create_info.pQueueFamilyIndices = &qfi;
3645
3646 VkBufferObj doit_buffer;
3647 doit_buffer.init(*m_device, buffer_create_info);
3648
3649 auto buffer = layer_data::make_unique<VkBufferObj>();
3650 buffer->init(*m_device, buffer_create_info);
3651
3652 VkDescriptorBufferInfo buffer_info[2] = {};
3653 buffer_info[0].buffer = doit_buffer.handle();
3654 buffer_info[0].offset = 0;
3655 buffer_info[0].range = sizeof(uint32_t);
3656 buffer_info[1].buffer = buffer->handle();
3657 buffer_info[1].offset = 0;
3658 buffer_info[1].range = sizeof(uint32_t);
3659
3660 VkBufferObj texel_buffer;
3661 buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
3662 texel_buffer.init(*m_device, buffer_create_info);
3663
3664 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
3665 bvci.buffer = texel_buffer.handle();
3666 bvci.format = VK_FORMAT_R32_SFLOAT;
3667 bvci.offset = 0;
3668 bvci.range = VK_WHOLE_SIZE;
3669
3670 auto texel_bufferview = layer_data::make_unique<vk_testing::BufferView>();
3671 texel_bufferview->init(*m_device, bvci);
3672
3673 auto index_buffer_create_info = LvlInitStruct<VkBufferCreateInfo>();
3674 index_buffer_create_info.size = sizeof(uint32_t);
3675 index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
3676 VkBufferObj index_buffer;
3677 index_buffer.init(*m_device, index_buffer_create_info);
3678
3679 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3680 VkImageObj sampled_image(m_device);
3681 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3682 sampled_image.Init(image_ci);
3683 auto sampled_view = layer_data::make_unique<vk_testing::ImageView>();
3684 auto imageview_ci = SafeSaneImageViewCreateInfo(sampled_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3685 sampled_view->init(*m_device, imageview_ci);
3686
3687 VkImageObj combined_image(m_device);
3688 image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL);
3689 combined_image.Init(image_ci);
3690 imageview_ci = SafeSaneImageViewCreateInfo(combined_image, format, VK_IMAGE_ASPECT_COLOR_BIT);
3691 auto combined_view = layer_data::make_unique<vk_testing::ImageView>();
3692 combined_view->init(*m_device, imageview_ci);
3693
3694 vk_testing::Sampler sampler;
3695 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
3696 sampler.init(*m_device, sampler_ci);
3697
3698 VkDescriptorImageInfo image_info[3] = {};
3699 image_info[0].sampler = sampler.handle();
3700 image_info[0].imageView = VK_NULL_HANDLE;
3701 image_info[0].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3702 image_info[1].sampler = VK_NULL_HANDLE;
3703 image_info[1].imageView = sampled_view->handle();
3704 image_info[1].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3705 image_info[2].sampler = sampler.handle();
3706 image_info[2].imageView = combined_view->handle();
3707 image_info[2].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3708
3709 // Update all descriptors
3710 std::array<VkWriteDescriptorSet, kNumDescriptors> descriptor_writes;
3711 descriptor_writes[0] = LvlInitStruct<VkWriteDescriptorSet>();
3712 descriptor_writes[0].dstSet = descriptor_set.set_;
3713 descriptor_writes[0].dstBinding = 0;
3714 descriptor_writes[0].descriptorCount = 1;
3715 descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3716 descriptor_writes[0].pBufferInfo = &buffer_info[0];
3717
3718 descriptor_writes[1] = LvlInitStruct<VkWriteDescriptorSet>();
3719 descriptor_writes[1].dstSet = descriptor_set.set_;
3720 descriptor_writes[1].dstBinding = 1;
3721 descriptor_writes[1].descriptorCount = 1;
3722 descriptor_writes[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
3723 descriptor_writes[1].pBufferInfo = &buffer_info[1];
3724
3725 descriptor_writes[2] = LvlInitStruct<VkWriteDescriptorSet>();
3726 descriptor_writes[2].dstSet = descriptor_set.set_;
3727 descriptor_writes[2].dstBinding = 2;
3728 descriptor_writes[2].descriptorCount = 1;
3729 descriptor_writes[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
3730 descriptor_writes[2].pTexelBufferView = &texel_bufferview->handle();
3731
3732 descriptor_writes[3] = LvlInitStruct<VkWriteDescriptorSet>();
3733 descriptor_writes[3].dstSet = descriptor_set.set_;
3734 descriptor_writes[3].dstBinding = 3;
3735 descriptor_writes[3].descriptorCount = 1;
3736 descriptor_writes[3].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
3737 descriptor_writes[3].pImageInfo = &image_info[0];
3738
3739 descriptor_writes[4] = LvlInitStruct<VkWriteDescriptorSet>();
3740 descriptor_writes[4].dstSet = descriptor_set.set_;
3741 descriptor_writes[4].dstBinding = 4;
3742 descriptor_writes[4].descriptorCount = 1;
3743 descriptor_writes[4].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
3744 descriptor_writes[4].pImageInfo = &image_info[1];
3745
3746 descriptor_writes[5] = LvlInitStruct<VkWriteDescriptorSet>();
3747 descriptor_writes[5].dstSet = descriptor_set.set_;
3748 descriptor_writes[5].dstBinding = 5;
3749 descriptor_writes[5].descriptorCount = 1;
3750 descriptor_writes[5].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
3751 descriptor_writes[5].pImageInfo = &image_info[2];
3752
3753 vk::UpdateDescriptorSets(m_device->device(), descriptor_writes.size(), descriptor_writes.data(), 0, NULL);
3754
3755 // only descriptor 0 is used, the rest are going to get destroyed
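    // PARTIALLY_BOUND + UPDATE_UNUSED_WHILE_PENDING allow descriptors the shader never actually
    // accesses to reference destroyed resources; per the test description, syncval must ignore
    // them rather than crash.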
3756 char const *shader_source = R"glsl(
3757 #version 450
3758 layout(set = 0, binding = 0) uniform foo_0 { int val; } doit;
3759 layout(set = 0, binding = 1) uniform foo_1 { int val; } readit;
3760 layout(set = 0, binding = 2) uniform samplerBuffer texels;
3761 layout(set = 0, binding = 3) uniform sampler samp;
3762 layout(set = 0, binding = 4) uniform texture2D img;
3763 layout(set = 0, binding = 5) uniform sampler2D sampled_image;
3764
3765 void main() {
3766 vec4 x;
3767 vec4 y;
3768 vec4 z;
3769 if (doit.val == 0) {
3770 gl_Position = vec4(0.0);
3771 x = vec4(0.0);
3772 y = vec4(0.0);
3773 z = vec4(0.0);
3774 } else {
3775 gl_Position = vec4(readit.val);
3776 x = texelFetch(texels, 5);
3777 y = texture(sampler2D(img, samp), vec2(0));
3778 z = texture(sampled_image, vec2(0));
3779 }
3780 }
3781 )glsl";
3782
3783 VkShaderObj vs(this, shader_source, VK_SHADER_STAGE_VERTEX_BIT);
3784 VkPipelineObj pipe(m_device);
3785 pipe.AddShader(&vs);
3786 pipe.AddDefaultColorAttachment();
3787 pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
sfricke-samsung6fc3e322022-02-15 22:41:29 -08003788 VkCommandBufferBeginInfo begin_info = LvlInitStruct<VkCommandBufferBeginInfo>();
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003789 m_commandBuffer->begin(&begin_info);
3790 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
3791 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
3792 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
3793 &descriptor_set.set_, 0, nullptr);
3794
3795 // destroy resources for the unused descriptors
3796 buffer.reset();
3797 texel_bufferview.reset();
3798 sampled_view.reset();
3799 combined_view.reset();
3800
3801 vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
3802 VkViewport viewport = {0, 0, 16, 16, 0, 1};
3803 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
3804 VkRect2D scissor = {{0, 0}, {16, 16}};
3805 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
3806 vk::CmdDrawIndexed(m_commandBuffer->handle(), 1, 1, 0, 0, 0);
3807 vk::CmdEndRenderPass(m_commandBuffer->handle());
3808 m_commandBuffer->end();
3809 m_commandBuffer->QueueCommandBuffer();
3810 vk::QueueWaitIdle(m_device->m_queue);
Jeremy Gebben9b6f0532022-02-02 11:10:31 -07003811}
ziga-lunargc71f1a92022-03-23 23:08:35 +01003812
ziga-lunarg26ba4b92022-03-24 16:43:03 +01003813TEST_F(VkSyncValTest, TestInvalidExternalSubpassDependency) {
3814 TEST_DESCRIPTION("Test write after write hazard with invalid external subpass dependency");
3815
3816 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3817 ASSERT_NO_FATAL_FAILURE(InitState());
3818
3819 VkSubpassDependency subpass_dependency = {};
3820 subpass_dependency.srcSubpass = 0;
3821 subpass_dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
3822 subpass_dependency.srcStageMask = 0;
3823 subpass_dependency.dstStageMask = 0;
3824 subpass_dependency.srcAccessMask = 0;
3825 subpass_dependency.dstAccessMask = 0;
3826 subpass_dependency.dependencyFlags = 0;
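    // With all-zero stage and access masks this external dependency provides no ordering or
    // visibility, so the attachment work at the end of the render pass (store op and final
    // transition to GENERAL) is left unsynchronized, which is presumably what the
    // WRITE_AFTER_WRITE expected below reports.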
3827
3828 VkAttachmentReference attach_ref1 = {};
3829 attach_ref1.attachment = 0;
3830 attach_ref1.layout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
3831 VkAttachmentReference attach_ref2 = {};
3832 attach_ref2.attachment = 0;
3833 attach_ref2.layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
3834
3835 VkSubpassDescription subpass_descriptions[2] = {};
3836 subpass_descriptions[0].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3837 subpass_descriptions[0].pDepthStencilAttachment = &attach_ref1;
3838 subpass_descriptions[1].pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
3839 subpass_descriptions[1].pDepthStencilAttachment = &attach_ref2;
3840
3841 VkAttachmentDescription attachment_description = {};
3842 attachment_description.format = VK_FORMAT_D32_SFLOAT;
3843 attachment_description.samples = VK_SAMPLE_COUNT_1_BIT;
3844 attachment_description.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
3845 attachment_description.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
3846 attachment_description.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
3847 attachment_description.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
3848 attachment_description.initialLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
3849 attachment_description.finalLayout = VK_IMAGE_LAYOUT_GENERAL;
3850
3851 auto rp_ci = LvlInitStruct<VkRenderPassCreateInfo>();
3852 rp_ci.subpassCount = 1;
3853 rp_ci.pSubpasses = subpass_descriptions;
3854 rp_ci.attachmentCount = 1;
3855 rp_ci.pAttachments = &attachment_description;
3856 rp_ci.dependencyCount = 1;
3857 rp_ci.pDependencies = &subpass_dependency;
3858
3859 vk_testing::RenderPass render_pass;
3860 render_pass.init(*m_device, rp_ci);
3861
3862 VkClearValue clear_value = {};
3863 clear_value.color = {{0, 0, 0, 0}};
3864
3865 VkImageCreateInfo image_ci = LvlInitStruct<VkImageCreateInfo>();
3866 image_ci.imageType = VK_IMAGE_TYPE_2D;
3867 image_ci.format = VK_FORMAT_D32_SFLOAT;
3868 image_ci.extent.width = 32;
3869 image_ci.extent.height = 32;
3870 image_ci.extent.depth = 1;
3871 image_ci.mipLevels = 1;
3872 image_ci.arrayLayers = 1;
3873 image_ci.samples = VK_SAMPLE_COUNT_1_BIT;
3874 image_ci.tiling = VK_IMAGE_TILING_OPTIMAL;
3875 image_ci.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
3876
3877 VkImageObj image1(m_device);
3878 image1.init(&image_ci);
3879 ASSERT_TRUE(image1.initialized());
3880
3881 vk_testing::ImageView image_view1;
3882 VkImageViewCreateInfo iv_ci = LvlInitStruct<VkImageViewCreateInfo>();
3883 iv_ci.image = image1.handle();
3884 iv_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
3885 iv_ci.format = VK_FORMAT_D32_SFLOAT;
3886 iv_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
3887 iv_ci.subresourceRange.baseMipLevel = 0;
3888 iv_ci.subresourceRange.levelCount = 1;
3889 iv_ci.subresourceRange.baseArrayLayer = 0;
3890 iv_ci.subresourceRange.layerCount = 1;
3891 image_view1.init(*m_device, iv_ci);
3892
3893 VkImageView framebuffer_attachments[1] = {image_view1.handle()};
3894
3895 auto fb_ci = LvlInitStruct<VkFramebufferCreateInfo>();
3896 fb_ci.renderPass = render_pass.handle();
3897 fb_ci.attachmentCount = 1;
3898 fb_ci.pAttachments = framebuffer_attachments;
3899 fb_ci.width = 32;
3900 fb_ci.height = 32;
3901 fb_ci.layers = 1;
3902
3903 vk_testing::Framebuffer framebuffer;
3904 framebuffer.init(*m_device, fb_ci);
3905
3906 auto rp_bi = LvlInitStruct<VkRenderPassBeginInfo>();
3907 rp_bi.renderPass = render_pass.handle();
3908 rp_bi.framebuffer = framebuffer.handle();
3909 rp_bi.renderArea.extent.width = 32;
3910 rp_bi.renderArea.extent.height = 32;
3911 rp_bi.clearValueCount = 1;
3912 rp_bi.pClearValues = &clear_value;
3913
3914 auto ds_ci = LvlInitStruct<VkPipelineDepthStencilStateCreateInfo>();
3915 ds_ci.depthTestEnable = VK_FALSE;
3916 ds_ci.depthWriteEnable = VK_FALSE;
3917 ds_ci.depthCompareOp = VK_COMPARE_OP_NEVER;
3918
3919 CreatePipelineHelper pipe(*this);
3920 pipe.InitInfo();
3921 pipe.gp_ci_.renderPass = render_pass.handle();
3922 pipe.gp_ci_.pDepthStencilState = &ds_ci;
3923 pipe.InitState();
3924 ASSERT_VK_SUCCESS(pipe.CreateGraphicsPipeline());
3925
3926 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3927
3928 m_commandBuffer->begin();
3929 m_commandBuffer->BeginRenderPass(rp_bi);
3930 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
3931 vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
3932 m_commandBuffer->EndRenderPass();
3933 m_commandBuffer->end();
3934
3935 m_errorMonitor->VerifyFound();
3936}
3937
ziga-lunargc71f1a92022-03-23 23:08:35 +01003938TEST_F(VkSyncValTest, TestCopyingToCompressedImage) {
3939 TEST_DESCRIPTION("Copy from uncompressed to compressed image with and without overlap.");
3940
3941 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
3942 bool copy_commands_2 = false;
3943 if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME)) {
3944 m_device_extension_names.push_back(VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME);
3945 copy_commands_2 = true;
3946 }
3947 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
3948
3949 VkFormatProperties format_properties;
3950 VkFormat mp_format = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
3951 vk::GetPhysicalDeviceFormatProperties(gpu(), mp_format, &format_properties);
3952 if ((format_properties.linearTilingFeatures & VK_FORMAT_FEATURE_TRANSFER_DST_BIT) == 0) {
3953 printf(
3954             "%s Device does not support VK_FORMAT_FEATURE_TRANSFER_DST_BIT (linear tiling) for VK_FORMAT_BC1_RGBA_UNORM_BLOCK, skipping test.\n",
3955 kSkipPrefix);
3956 return;
3957 }
3958
3959 VkImageObj src_image(m_device);
3960 src_image.Init(1, 1, 1, VK_FORMAT_R32G32_UINT, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_LINEAR);
3961 VkImageObj dst_image(m_device);
3962 dst_image.Init(12, 4, 1, VK_FORMAT_BC1_RGBA_UNORM_BLOCK, VK_IMAGE_USAGE_TRANSFER_DST_BIT, VK_IMAGE_TILING_LINEAR);
3963
3964 VkImageCopy copy_regions[2] = {};
3965 copy_regions[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3966 copy_regions[0].srcSubresource.mipLevel = 0;
3967 copy_regions[0].srcSubresource.baseArrayLayer = 0;
3968 copy_regions[0].srcSubresource.layerCount = 1;
3969 copy_regions[0].srcOffset = {0, 0, 0};
3970 copy_regions[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3971 copy_regions[0].dstSubresource.mipLevel = 0;
3972 copy_regions[0].dstSubresource.baseArrayLayer = 0;
3973 copy_regions[0].dstSubresource.layerCount = 1;
3974 copy_regions[0].dstOffset = {0, 0, 0};
3975 copy_regions[0].extent = {1, 1, 1};
3976 copy_regions[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3977 copy_regions[1].srcSubresource.mipLevel = 0;
3978 copy_regions[1].srcSubresource.baseArrayLayer = 0;
3979 copy_regions[1].srcSubresource.layerCount = 1;
3980 copy_regions[1].srcOffset = {0, 0, 0};
3981 copy_regions[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
3982 copy_regions[1].dstSubresource.mipLevel = 0;
3983 copy_regions[1].dstSubresource.baseArrayLayer = 0;
3984 copy_regions[1].dstSubresource.layerCount = 1;
3985 copy_regions[1].dstOffset = {4, 0, 0};
3986 copy_regions[1].extent = {1, 1, 1};
3987
3988 m_commandBuffer->begin();
3989
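    // For an uncompressed -> BC1 copy the extent is in source texels and one R32G32_UINT texel
    // (64 bits) fills one 4x4 BC1 block, so dstOffsets {0,0,0} and {4,0,0} write disjoint blocks;
    // retargeting the second region to {7,0,0} overlaps the block already written at {4,0,0},
    // producing the WRITE_AFTER_WRITE expected below.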
ziga-lunargc71f1a92022-03-23 23:08:35 +01003990 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3991 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[0]);
3992 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3993 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
ziga-lunargc71f1a92022-03-23 23:08:35 +01003994 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
3995 copy_regions[1].dstOffset = {7, 0, 0};
3996 vk::CmdCopyImage(m_commandBuffer->handle(), src_image.handle(), VK_IMAGE_LAYOUT_GENERAL, dst_image.handle(),
3997 VK_IMAGE_LAYOUT_GENERAL, 1, &copy_regions[1]);
3998 m_errorMonitor->VerifyFound();
3999
4000 m_commandBuffer->end();
4001
4002 if (copy_commands_2) {
4003 auto vkCmdCopyImage2KHR =
4004 reinterpret_cast<PFN_vkCmdCopyImage2KHR>(vk::GetInstanceProcAddr(instance(), "vkCmdCopyImage2KHR"));
4005 assert(vkCmdCopyImage2KHR != nullptr);
4006
4007 m_commandBuffer->reset();
4008
4009 VkImageCopy2KHR copy_regions2[2];
4010 copy_regions2[0] = LvlInitStruct<VkImageCopy2KHR>();
4011 copy_regions2[0].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4012 copy_regions2[0].srcSubresource.mipLevel = 0;
4013 copy_regions2[0].srcSubresource.baseArrayLayer = 0;
4014 copy_regions2[0].srcSubresource.layerCount = 1;
4015 copy_regions2[0].srcOffset = {0, 0, 0};
4016 copy_regions2[0].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4017 copy_regions2[0].dstSubresource.mipLevel = 0;
4018 copy_regions2[0].dstSubresource.baseArrayLayer = 0;
4019 copy_regions2[0].dstSubresource.layerCount = 1;
4020 copy_regions2[0].dstOffset = {0, 0, 0};
4021 copy_regions2[0].extent = {1, 1, 1};
4022 copy_regions2[1] = LvlInitStruct<VkImageCopy2KHR>();
4023 copy_regions2[1].srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4024 copy_regions2[1].srcSubresource.mipLevel = 0;
4025 copy_regions2[1].srcSubresource.baseArrayLayer = 0;
4026 copy_regions2[1].srcSubresource.layerCount = 1;
4027 copy_regions2[1].srcOffset = {0, 0, 0};
4028 copy_regions2[1].dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
4029 copy_regions2[1].dstSubresource.mipLevel = 0;
4030 copy_regions2[1].dstSubresource.baseArrayLayer = 0;
4031 copy_regions2[1].dstSubresource.layerCount = 1;
4032 copy_regions2[1].dstOffset = {4, 0, 0};
4033 copy_regions2[1].extent = {1, 1, 1};
4034
4035 auto copy_image_info = LvlInitStruct<VkCopyImageInfo2KHR>();
4036 copy_image_info.srcImage = src_image.handle();
4037 copy_image_info.srcImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4038 copy_image_info.dstImage = dst_image.handle();
4039 copy_image_info.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
4040 copy_image_info.regionCount = 2;
4041 copy_image_info.pRegions = copy_regions2;
4042
4043 m_commandBuffer->begin();
4044
ziga-lunargc71f1a92022-03-23 23:08:35 +01004045 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
ziga-lunargc71f1a92022-03-23 23:08:35 +01004046 m_errorMonitor->SetDesiredFailureMsg(kErrorBit, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4047 copy_image_info.regionCount = 1;
4048 copy_image_info.pRegions = &copy_regions2[1];
4049         copy_regions2[1].dstOffset = {7, 0, 0};
4050 vkCmdCopyImage2KHR(m_commandBuffer->handle(), &copy_image_info);
4051 m_errorMonitor->VerifyFound();
4052
4053 m_commandBuffer->end();
4054 }
4055}
John Zulaufd79e34f2022-04-20 16:39:59 -06004056
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004057TEST_F(VkSyncValTest, StageAccessExpansion) {
4058 SetTargetApiVersion(VK_API_VERSION_1_2);
4059
4060 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework());
4061 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4062 ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
4063
4064 VkImageUsageFlags image_usage_combine = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
4065 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4066 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4067 VkImageObj image_c_a(m_device), image_c_b(m_device);
4068 const auto image_c_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_combine, VK_IMAGE_TILING_OPTIMAL);
4069 image_c_a.Init(image_c_ci);
4070 image_c_b.Init(image_c_ci);
4071
4072 VkImageView imageview_c = image_c_a.targetView(format);
4073 VkImageUsageFlags image_usage_storage =
4074 VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4075 VkImageObj image_s_a(m_device), image_s_b(m_device);
4076 const auto image_s_ci = VkImageObj::ImageCreateInfo2D(16, 16, 1, 1, format, image_usage_storage, VK_IMAGE_TILING_OPTIMAL);
4077 image_s_a.Init(image_s_ci);
4078 image_s_b.Init(image_s_ci);
4079 image_s_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4080 image_s_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4081
4082 VkImageView imageview_s = image_s_a.targetView(format);
4083
4084 vk_testing::Sampler sampler_s, sampler_c;
4085 VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
4086 sampler_s.init(*m_device, sampler_ci);
4087 sampler_c.init(*m_device, sampler_ci);
4088
4089 VkBufferObj buffer_a, buffer_b;
4090 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4091 VkBufferUsageFlags buffer_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
4092 VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4093 buffer_a.init(*m_device, buffer_a.create_info(2048, buffer_usage, nullptr), mem_prop);
4094 buffer_b.init(*m_device, buffer_b.create_info(2048, buffer_usage, nullptr), mem_prop);
4095
4096 vk_testing::BufferView bufferview;
4097 auto bvci = LvlInitStruct<VkBufferViewCreateInfo>();
4098 bvci.buffer = buffer_a.handle();
4099 bvci.format = VK_FORMAT_R32_SFLOAT;
4100 bvci.offset = 0;
4101 bvci.range = VK_WHOLE_SIZE;
4102
4103 bufferview.init(*m_device, bvci);
4104
4105 OneOffDescriptorSet descriptor_set(m_device,
4106 {
4107 {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4108 {1, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
4109 {2, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
4110 {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
4111 });
4112
4113 descriptor_set.WriteDescriptorBufferInfo(0, buffer_a.handle(), 0, 2048);
4114 descriptor_set.WriteDescriptorImageInfo(1, imageview_c, sampler_c.handle(), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
4115 VK_IMAGE_LAYOUT_GENERAL);
4116 descriptor_set.WriteDescriptorImageInfo(2, imageview_s, sampler_s.handle(), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
4117 VK_IMAGE_LAYOUT_GENERAL);
4118 descriptor_set.WriteDescriptorBufferView(3, bufferview.handle());
4119 descriptor_set.UpdateDescriptorSets();
4120
    // Shader source reading through all four descriptor bindings (used below as the fragment shader)
4122 std::string csSource = R"glsl(
4123 #version 450
4124 layout(set=0, binding=0) uniform foo { float x; } ub0;
4125 layout(set=0, binding=1) uniform sampler2D cis1;
4126 layout(set=0, binding=2, rgba8) uniform readonly image2D si2;
4127 layout(set=0, binding=3, r32f) uniform readonly imageBuffer stb3;
4128 void main(){
4129 vec4 vColor4;
4130 vColor4.x = ub0.x;
4131 vColor4 = texture(cis1, vec2(0));
4132 vColor4 = imageLoad(si2, ivec2(0));
4133 vColor4 = imageLoad(stb3, 0);
4134 }
4135 )glsl";
4136
4137 // Draw
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004138 const float vbo_data[3] = {1.f, 0.f, 1.f};
4139 VkVertexInputAttributeDescription VertexInputAttributeDescription = {0, 0, VK_FORMAT_R32G32B32_SFLOAT, sizeof(vbo_data)};
4140 VkVertexInputBindingDescription VertexInputBindingDescription = {0, sizeof(vbo_data), VK_VERTEX_INPUT_RATE_VERTEX};
4141 VkBufferObj vbo, vbo2;
4142 buffer_usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4143 vbo.init(*m_device, vbo.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4144 vbo2.init(*m_device, vbo2.create_info(sizeof(vbo_data), buffer_usage, nullptr), mem_prop);
4145
4146 VkShaderObj vs(this, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT);
Nathaniel Cesario2c8e1942022-06-21 09:15:19 -06004147 VkShaderObj fs(this, csSource.c_str(), VK_SHADER_STAGE_FRAGMENT_BIT);
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004148
4149 CreatePipelineHelper g_pipe(*this);
4150 g_pipe.InitInfo();
4151 g_pipe.InitState();
4152 g_pipe.vi_ci_.pVertexBindingDescriptions = &VertexInputBindingDescription;
4153 g_pipe.vi_ci_.vertexBindingDescriptionCount = 1;
4154 g_pipe.vi_ci_.pVertexAttributeDescriptions = &VertexInputAttributeDescription;
4155 g_pipe.vi_ci_.vertexAttributeDescriptionCount = 1;
4156 g_pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
4157 g_pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
4158 ASSERT_VK_SUCCESS(g_pipe.CreateGraphicsPipeline());
4159
4160 m_commandBuffer->reset();
4161 m_commandBuffer->begin();
4162 VkImageSubresourceLayers layer{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4163 VkOffset3D zero_offset{0, 0, 0};
4164 VkExtent3D full_extent{16, 16, 1};
4165 VkImageCopy image_region = {layer, zero_offset, layer, zero_offset, full_extent};
4166 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4167 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4168 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4169 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4170
4171 auto barrier = LvlInitStruct<VkMemoryBarrier>();
4172 barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4173 barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
4174
4175 // wrong: dst stage should be VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
4176 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1,
4177 &barrier, 0, nullptr, 0, nullptr);
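    // Syncval expands VK_ACCESS_SHADER_READ_BIT against the supplied stage mask, so with a dst stage of
    // VERTEX_SHADER only vertex-stage reads land in the barrier's second scope; the descriptor reads in this
    // test happen in the fragment shader and remain unordered against the copies above.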
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004178
4179 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4180 VkDeviceSize offset = 0;
4181 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4182
4183 VkViewport viewport = {0, 0, 16, 16, 0, 1};
4184 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4185 VkRect2D scissor = {{0, 0}, {16, 16}};
4186 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4187
4188 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4189 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4190 &descriptor_set.set_, 0, nullptr);
4191
4192 // one error for each image copied above
4193 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4194 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-READ_AFTER_WRITE");
4195 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4196 m_errorMonitor->VerifyFound();
4197
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004198 m_commandBuffer->EndRenderPass();
4199 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004200
4201 // Try again with the correct dst stage on the barrier
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004202 m_commandBuffer->reset();
4203 m_commandBuffer->begin();
4204 vk::CmdCopyImage(m_commandBuffer->handle(), image_c_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_c_a.handle(),
4205 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4206 vk::CmdCopyImage(m_commandBuffer->handle(), image_s_b.handle(), VK_IMAGE_LAYOUT_GENERAL, image_s_a.handle(),
4207 VK_IMAGE_LAYOUT_GENERAL, 1, &image_region);
4208
4209 vk::CmdPipelineBarrier(m_commandBuffer->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 1,
4210 &barrier, 0, nullptr, 0, nullptr);
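    // With FRAGMENT_SHADER as the dst stage, the expanded SHADER_READ access covers the fragment-stage
    // descriptor reads, so the draw below is correctly ordered after the transfer writes and no hazard is reported.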
4211
4212 m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
4213 vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &vbo.handle(), &offset);
4214
4215 vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
4216 vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
4217
4218 vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_);
4219 vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, g_pipe.pipeline_layout_.handle(), 0, 1,
4220 &descriptor_set.set_, 0, nullptr);
4221 vk::CmdDraw(m_commandBuffer->handle(), 1, 0, 0, 0);
4222 m_commandBuffer->EndRenderPass();
4223 m_commandBuffer->end();
Jeremy Gebbena0e0e772022-06-08 14:41:43 -06004224}
4225
John Zulaufb66ee052022-06-10 16:52:28 -06004226struct QSTestContext {
John Zulaufc55f4702022-07-15 12:16:34 -06004227 VkDeviceObj* dev;
John Zulaufb66ee052022-06-10 16:52:28 -06004228 uint32_t q_fam = ~0U;
John Zulauf6df2d5c2022-05-28 13:02:21 -06004229 VkQueue q0 = VK_NULL_HANDLE;
4230 VkQueue q1 = VK_NULL_HANDLE;
John Zulaufb66ee052022-06-10 16:52:28 -06004231
4232 VkBufferObj buffer_a;
4233 VkBufferObj buffer_b;
4234 VkBufferObj buffer_c;
4235
4236 VkBufferCopy region;
4237 VkCommandPoolObj pool;
4238
4239 VkCommandBufferObj cba;
4240 VkCommandBufferObj cbb;
4241 VkCommandBufferObj cbc;
4242
4243 VkCommandBuffer h_cba = VK_NULL_HANDLE;
4244 VkCommandBuffer h_cbb = VK_NULL_HANDLE;
4245 VkCommandBuffer h_cbc = VK_NULL_HANDLE;
4246
4247 vk_testing::Semaphore semaphore;
4248 vk_testing::Event event;
4249
4250 VkCommandBufferObj* current_cb = nullptr;
4251
John Zulaufc55f4702022-07-15 12:16:34 -06004252 QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0 = nullptr, VkQueueObj* force_q1 = nullptr);
John Zulaufaa7ee262022-08-01 18:10:28 -06004253 VkCommandBuffer InitFromPool(VkCommandBufferObj& cb_obj);
John Zulaufb66ee052022-06-10 16:52:28 -06004254 bool Valid() const { return q1 != VK_NULL_HANDLE; }
4255
4256 void Begin(VkCommandBufferObj& cb);
4257 void BeginA() { Begin(cba); }
4258 void BeginB() { Begin(cbb); }
4259 void BeginC() { Begin(cbc); }
4260
4261 void End();
4262
4263 void CopyAToB() { vk::CmdCopyBuffer(current_cb->handle(), buffer_a.handle(), buffer_b.handle(), 1, &region); }
4264 void CopyAToC() { vk::CmdCopyBuffer(current_cb->handle(), buffer_a.handle(), buffer_c.handle(), 1, &region); }
4265
4266 void CopyBToA() { vk::CmdCopyBuffer(current_cb->handle(), buffer_b.handle(), buffer_a.handle(), 1, &region); }
4267 void CopyBToC() { vk::CmdCopyBuffer(current_cb->handle(), buffer_b.handle(), buffer_c.handle(), 1, &region); }
4268
4269 void CopyCToA() { vk::CmdCopyBuffer(current_cb->handle(), buffer_c.handle(), buffer_a.handle(), 1, &region); }
4270 void CopyCToB() { vk::CmdCopyBuffer(current_cb->handle(), buffer_c.handle(), buffer_b.handle(), 1, &region); }
4271
John Zulauf46f5d6b2022-06-30 12:38:34 -06004272 void CopyGeneral(const VkImageObj& from, const VkImageObj& to, const VkImageCopy& region) {
4273 vk::CmdCopyImage(current_cb->handle(), from.handle(), VK_IMAGE_LAYOUT_GENERAL, to.handle(), VK_IMAGE_LAYOUT_GENERAL, 1,
4274 &region);
4275 };
4276
John Zulaufb66ee052022-06-10 16:52:28 -06004277 VkBufferMemoryBarrier InitBufferBarrier(const VkBufferObj& buffer);
4278 void TransferBarrier(const VkBufferObj& buffer);
4279 void TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier);
4280
John Zulaufc55f4702022-07-15 12:16:34 -06004281 void Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004282 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4283 VkFence fence = VK_NULL_HANDLE);
John Zulaufb66ee052022-06-10 16:52:28 -06004284
John Zulaufc55f4702022-07-15 12:16:34 -06004285 void Submit0(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004286 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4287 VkFence fence = VK_NULL_HANDLE) {
4288 Submit(q0, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004289 }
4290 void Submit0Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit0(cb, semaphore.handle(), wait_mask); }
4291 void Submit0Signal(VkCommandBufferObj& cb) { Submit0(cb, VK_NULL_HANDLE, 0U, semaphore.handle()); }
4292
John Zulaufc55f4702022-07-15 12:16:34 -06004293 void Submit1(VkCommandBufferObj& cb, VkSemaphore wait = VK_NULL_HANDLE,
John Zulaufaa7ee262022-08-01 18:10:28 -06004294 VkPipelineStageFlags wait_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VkSemaphore signal = VK_NULL_HANDLE,
4295 VkFence fence = VK_NULL_HANDLE) {
4296 Submit(q1, cb, wait, wait_mask, signal, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004297 }
4298 void Submit1Wait(VkCommandBufferObj& cb, VkPipelineStageFlags wait_mask) { Submit1(cb, semaphore.handle(), wait_mask); }
4299 void Submit1Signal(VkCommandBufferObj& cb, VkPipelineStageFlags signal_mask) {
4300 Submit1(cb, VK_NULL_HANDLE, 0U, semaphore.handle());
4301 }
4302 void SetEvent(VkPipelineStageFlags src_mask) { event.cmd_set(*current_cb, src_mask); }
4303 void WaitEventBufferTransfer(VkBufferObj& buffer, VkPipelineStageFlags src_mask, VkPipelineStageFlags dst_mask) {
4304 std::vector<VkBufferMemoryBarrier> buffer_barriers(1, InitBufferBarrier(buffer));
4305 event.cmd_wait(*current_cb, src_mask, dst_mask, std::vector<VkMemoryBarrier>(), buffer_barriers,
4306 std::vector<VkImageMemoryBarrier>());
4307 }
John Zulaufc55f4702022-07-15 12:16:34 -06004308 void QueueWait(VkQueue q) { vk::QueueWaitIdle(q); }
4309 void QueueWait0() { QueueWait(q0); }
4310 void QueueWait1() { QueueWait(q1); }
4311 void DeviceWait() { vk::DeviceWaitIdle(dev->handle()); }
John Zulaufb66ee052022-06-10 16:52:28 -06004312};
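// QSTestContext bundles the queues, buffers, command buffers, semaphore, and event used by the queue-submit
// (QS) synchronization tests below. A typical sequence (sketch, using the members declared above):
//
//     QSTestContext test(m_device);
//     if (!test.Valid()) GTEST_SKIP() << "Test requires at least 2 TRANSFER capable queues in the same queue_family.";
//     test.BeginA();  test.CopyAToB();  test.End();
//     test.BeginB();  test.CopyCToA();  test.End();
//     test.Submit0Signal(test.cba);                                // signal the semaphore on q0
//     test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT);  // wait on q1 before transfers
//     test.DeviceWait();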
4313
John Zulaufc55f4702022-07-15 12:16:34 -06004314QSTestContext::QSTestContext(VkDeviceObj* device, VkQueueObj* force_q0, VkQueueObj* force_q1)
4315 : dev(device), q0(VK_NULL_HANDLE), q1(VK_NULL_HANDLE) {
4316 if (force_q0) {
4317 q0 = force_q0->handle();
4318 q_fam = force_q0->get_family_index();
4319 if (force_q1) {
4320 // The object has some assumptions that the queues are from the the same family, so enforce this here
4321 if (force_q1->get_family_index() == q_fam) {
4322 q1 = force_q1->handle();
4323 }
4324 } else {
4325 q1 = q0; // Allow the two queues to be the same and valid if forced
4326 }
4327 } else {
4328 const auto& queues = device->dma_queues();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004329
John Zulaufc55f4702022-07-15 12:16:34 -06004330 const uint32_t q_count = static_cast<uint32_t>(queues.size());
4331 for (uint32_t q0_index = 0; q0_index < q_count; ++q0_index) {
4332 const auto* q0_entry = queues[q0_index];
4333 q0 = q0_entry->handle();
4334 q_fam = q0_entry->get_family_index();
4335 for (uint32_t q1_index = (q0_index + 1); q1_index < q_count; ++q1_index) {
4336 const auto* q1_entry = queues[q1_index];
4337 if (q_fam == q1_entry->get_family_index()) {
4338 q1 = q1_entry->handle();
4339 break;
4340 }
4341 }
4342 if (Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004343 break;
4344 }
4345 }
John Zulauf6df2d5c2022-05-28 13:02:21 -06004346 }
John Zulaufc55f4702022-07-15 12:16:34 -06004347
John Zulaufb66ee052022-06-10 16:52:28 -06004348 if (!Valid()) return;
4349
4350 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4351 buffer_a.init_as_src_and_dst(*device, 256, mem_prop);
4352 buffer_b.init_as_src_and_dst(*device, 256, mem_prop);
4353 buffer_c.init_as_src_and_dst(*device, 256, mem_prop);
4354
4355 region = {0, 0, 256};
4356
4357 pool.Init(device, q_fam, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004358
John Zulaufaa7ee262022-08-01 18:10:28 -06004359 h_cba = InitFromPool(cba);
4360 h_cbb = InitFromPool(cbb);
4361 h_cbc = InitFromPool(cbc);
John Zulaufb66ee052022-06-10 16:52:28 -06004362
4363 auto semaphore_ci = LvlInitStruct<VkSemaphoreCreateInfo>();
4364 semaphore.init(*device, semaphore_ci);
4365
4366 VkEventCreateInfo eci = LvlInitStruct<VkEventCreateInfo>();
4367 event.init(*device, eci);
4368}
4369
John Zulaufaa7ee262022-08-01 18:10:28 -06004370VkCommandBuffer QSTestContext::InitFromPool(VkCommandBufferObj& cb_obj) {
4371 cb_obj.Init(dev, &pool);
4372 return cb_obj.handle();
4373}
4374
John Zulaufb66ee052022-06-10 16:52:28 -06004375void QSTestContext::Begin(VkCommandBufferObj& cb) {
John Zulaufc55f4702022-07-15 12:16:34 -06004376 VkCommandBufferBeginInfo info = LvlInitStruct<VkCommandBufferBeginInfo>();
4377 info.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
4378 info.pInheritanceInfo = nullptr;
4379
John Zulaufb66ee052022-06-10 16:52:28 -06004380 cb.reset();
John Zulaufc55f4702022-07-15 12:16:34 -06004381 cb.begin(&info);
John Zulaufb66ee052022-06-10 16:52:28 -06004382 current_cb = &cb;
4383}
4384
4385void QSTestContext::End() {
4386 current_cb->end();
4387 current_cb = nullptr;
4388}
4389
4390VkBufferMemoryBarrier QSTestContext::InitBufferBarrier(const VkBufferObj& buffer) {
4391 auto buffer_barrier = LvlInitStruct<VkBufferMemoryBarrier>();
4392 buffer_barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
4393 buffer_barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
4394 buffer_barrier.buffer = buffer.handle();
4395 buffer_barrier.offset = 0;
4396 buffer_barrier.size = 256;
4397 return buffer_barrier;
4398}
4399
4400void QSTestContext::TransferBarrier(const VkBufferMemoryBarrier& buffer_barrier) {
4401 vk::CmdPipelineBarrier(current_cb->handle(), VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, nullptr, 1,
4402 &buffer_barrier, 0, nullptr);
4403}
4404
4405void QSTestContext::TransferBarrier(const VkBufferObj& buffer) { TransferBarrier(InitBufferBarrier(buffer)); }
4406
John Zulaufaa7ee262022-08-01 18:10:28 -06004407void QSTestContext::Submit(VkQueue q, VkCommandBufferObj& cb, VkSemaphore wait, VkPipelineStageFlags wait_mask, VkSemaphore signal,
4408 VkFence fence) {
John Zulaufb66ee052022-06-10 16:52:28 -06004409 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4410 submit1.commandBufferCount = 1;
4411 VkCommandBuffer h_cb = cb.handle();
4412 submit1.pCommandBuffers = &h_cb;
4413 if (wait != VK_NULL_HANDLE) {
4414 submit1.waitSemaphoreCount = 1;
4415 submit1.pWaitSemaphores = &wait;
4416 submit1.pWaitDstStageMask = &wait_mask;
4417 }
4418 if (signal != VK_NULL_HANDLE) {
4419 submit1.signalSemaphoreCount = 1;
4420 submit1.pSignalSemaphores = &signal;
4421 }
John Zulaufaa7ee262022-08-01 18:10:28 -06004422 vk::QueueSubmit(q, 1, &submit1, fence);
John Zulaufb66ee052022-06-10 16:52:28 -06004423}
4424
John Zulaufb9fad9f2022-07-15 11:10:37 -06004425TEST_F(VkSyncValTest, SyncQSBufferCopyHazards) {
4426 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4427 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4428
4429 VkBufferObj buffer_a;
4430 VkBufferObj buffer_b;
4431 VkBufferObj buffer_c;
4432 VkMemoryPropertyFlags mem_prop = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
4433 buffer_a.init_as_src_and_dst(*m_device, 256, mem_prop);
4434 buffer_b.init_as_src_and_dst(*m_device, 256, mem_prop);
4435 buffer_c.init_as_src_and_dst(*m_device, 256, mem_prop);
4436
4437 VkBufferCopy region = {0, 0, 256};
4438
4439 VkCommandBufferObj cba(m_device, m_commandPool);
4440 VkCommandBufferObj cbb(m_device, m_commandPool);
4441
4442 cba.begin();
4443 const VkCommandBuffer h_cba = cba.handle();
4444 vk::CmdCopyBuffer(h_cba, buffer_a.handle(), buffer_b.handle(), 1, &region);
4445 cba.end();
4446
4447 const VkCommandBuffer h_cbb = cbb.handle();
4448 cbb.begin();
4449 vk::CmdCopyBuffer(h_cbb, buffer_c.handle(), buffer_a.handle(), 1, &region);
4450 cbb.end();
4451
4452 auto submit1 = lvl_init_struct<VkSubmitInfo>();
4453 submit1.commandBufferCount = 2;
4454 VkCommandBuffer two_cbs[2] = {h_cba, h_cbb};
4455 submit1.pCommandBuffers = two_cbs;
4456
4457 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4458 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
4459 m_errorMonitor->VerifyFound();
4460
4461 vk::DeviceWaitIdle(m_device->device());
4462
4463 VkSubmitInfo submit2[2] = {lvl_init_struct<VkSubmitInfo>(), lvl_init_struct<VkSubmitInfo>()};
4464 submit2[0].commandBufferCount = 1;
4465 submit2[0].pCommandBuffers = &h_cba;
4466 submit2[1].commandBufferCount = 1;
4467 submit2[1].pCommandBuffers = &h_cbb;
4468 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4469 vk::QueueSubmit(m_device->m_queue, 2, submit2, VK_NULL_HANDLE);
4470 m_errorMonitor->VerifyFound();
4471
    // With the skip settings, the above QueueSubmits didn't record, so we can treat the global queue contexts as empty
4473 submit1.commandBufferCount = 1;
4474 submit1.pCommandBuffers = &h_cba;
4475 // Submit A
John Zulaufb9fad9f2022-07-15 11:10:37 -06004476 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004477
4478 submit1.pCommandBuffers = &h_cbb;
4479 // Submit B -- which should conflict via the queue's "last batch"
4480 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
4481 vk::QueueSubmit(m_device->m_queue, 1, &submit1, VK_NULL_HANDLE);
4482 m_errorMonitor->VerifyFound();
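    // With queue submit validation enabled, syncval carries the access state of batches previously submitted to
    // the queue, so CB A's read of buffer_a in the prior submit is still live and conflicts with CB B's write here.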
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004483
4484 m_device->wait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004485}
4486
4487TEST_F(VkSyncValTest, SyncQSBufferCopyVsIdle) {
4488 // TODO (jzulauf)
John Zulaufc55f4702022-07-15 12:16:34 -06004489 // GTEST_SKIP() << "this test is causing a sporadic crash on nvidia 32b release. Skip until further investigation";
John Zulaufb9fad9f2022-07-15 11:10:37 -06004490
4491 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4492 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4493
John Zulaufc55f4702022-07-15 12:16:34 -06004494 QSTestContext test(m_device, m_device->m_queue_obj);
4495 if (!test.Valid()) {
4496 GTEST_SKIP() << "Test requires a valid queue object.";
4497 }
John Zulaufb9fad9f2022-07-15 11:10:37 -06004498
John Zulaufc55f4702022-07-15 12:16:34 -06004499 test.BeginA();
4500 test.CopyAToB();
4501 test.End();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004502
John Zulaufc55f4702022-07-15 12:16:34 -06004503 test.BeginB();
4504 test.CopyCToA();
4505 test.End();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004506
4507 // Submit A
John Zulaufc55f4702022-07-15 12:16:34 -06004508 test.Submit0(test.cba);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004509
4510 // Submit B which hazards vs. A
John Zulaufb9fad9f2022-07-15 11:10:37 -06004511 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004512 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004513 m_errorMonitor->VerifyFound();
4514
    // With the skip settings, the above QueueSubmits didn't record, so we can treat the previous submit as not
    // having happened, and try again after a device wait idle.
    // Submit B again, but after idling, which should remove the hazard
John Zulaufc55f4702022-07-15 12:16:34 -06004518 test.DeviceWait();
4519 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004520
John Zulaufc55f4702022-07-15 12:16:34 -06004521 // Submit the same command again for another hazard
John Zulaufb9fad9f2022-07-15 11:10:37 -06004522 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
John Zulaufc55f4702022-07-15 12:16:34 -06004523 test.Submit0(test.cbb);
John Zulaufb9fad9f2022-07-15 11:10:37 -06004524 m_errorMonitor->VerifyFound();
4525
    // With the skip settings, the above QueueSubmits didn't record, so we can treat the previous submit as not
    // having happened, and try again after a queue wait idle.
    // Submit B again, but after idling, which should remove the hazard
John Zulaufc55f4702022-07-15 12:16:34 -06004529 test.QueueWait0();
4530 test.Submit0(test.cbb);
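    // After vkQueueWaitIdle the queue's prior accesses no longer participate in hazard detection, so the
    // resubmission above is clean.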
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004531
4532 m_device->wait();
John Zulaufb9fad9f2022-07-15 11:10:37 -06004533}
4534
John Zulaufaa7ee262022-08-01 18:10:28 -06004535TEST_F(VkSyncValTest, SyncQSBufferCopyVsFence) {
4536 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4537 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4538
4539 QSTestContext test(m_device, m_device->m_queue_obj);
4540 if (!test.Valid()) {
4541 GTEST_SKIP() << "Test requires a valid queue object.";
4542 }
4543
4544 vk_testing::Fence fence;
4545 fence.init(*m_device, VkFenceObj::create_info());
4546 VkFence fence_handle = fence.handle();
4547 VkResult wait_result;
4548 VkCommandBufferObj cbd;
4549 test.InitFromPool(cbd);
4550
    // Set up four CBs with copy commands
4552 // We'll wait for the first, but not the second
4553 test.BeginA();
4554 test.CopyAToB();
4555 test.End();
4556
4557 test.BeginB();
4558 test.CopyAToC();
4559 test.End();
4560
4561 test.BeginC();
4562 test.CopyAToB();
4563 test.End();
4564
4565 // This is the one that should error
4566 test.Begin(cbd);
4567 test.CopyAToC();
4568 test.End();
4569
    // Two copies *better* finish in a second... (1U << 30 ns is roughly one second, despite the constant's name)
    const uint64_t kFourSeconds = 1U << 30;
4572 // Copy A to B
4573 test.Submit0(test.cba, VK_NULL_HANDLE, 0U, VK_NULL_HANDLE, fence_handle);
4574 // Copy A to C
4575 test.Submit0(test.cbb);
4576 // Wait for A to B
4577 wait_result = fence.wait(kFourSeconds);
4578
4579 if (wait_result != VK_SUCCESS) {
4580 ADD_FAILURE() << "Fence wait failed. Aborting test.";
4581 m_device->wait();
4582 }
4583
4584 // A and B should be good to go...
4585 test.Submit0(test.cbc);
4586
    // But the second A->C copy (cbd) shouldn't be
4588 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4589 test.Submit0(cbd);
4590 m_errorMonitor->VerifyFound();
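    // The fence wait only retires the fenced A->B batch; cbb's unfenced write to buffer C is still pending,
    // so repeating the A->C copy in cbd is reported as a write-after-write on buffer C.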
4591
4592 test.DeviceWait();
4593}
4594
John Zulaufb66ee052022-06-10 16:52:28 -06004595TEST_F(VkSyncValTest, SyncQSBufferCopyQSORules) {
4596 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4597 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4598
4599 QSTestContext test(m_device);
4600 if (!test.Valid()) {
John Zulauf6df2d5c2022-05-28 13:02:21 -06004601 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4602 return;
4603 }
4604
    // Command Buffer A reads from buffer A and writes to buffer B
John Zulaufb66ee052022-06-10 16:52:28 -06004606 test.BeginA();
4607 test.CopyAToB();
4608 test.End();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004609
    // Command Buffer B reads from buffer C and writes to buffer A, but has a barrier to protect the write to A when
4611 // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
John Zulaufb66ee052022-06-10 16:52:28 -06004612 test.BeginB();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004613
    // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
4615 // correct semaphore operations between queues.
John Zulaufb66ee052022-06-10 16:52:28 -06004616 test.TransferBarrier(test.buffer_a);
4617 test.CopyCToA();
4618 test.End();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004619
John Zulaufd060c3f2022-06-08 16:00:46 -06004620 // Command Buffer C does the same copy as B but without the barrier.
John Zulaufb66ee052022-06-10 16:52:28 -06004621 test.BeginC();
4622 test.CopyCToA();
4623 test.End();
John Zulaufd060c3f2022-06-08 16:00:46 -06004624
John Zulauf6df2d5c2022-05-28 13:02:21 -06004625 // Submit A and B on the same queue, to assure us the barrier *would* be sufficient given QSO
    // This is included as a "success" check, just to verify CBA and CBB are set up correctly.
John Zulaufb66ee052022-06-10 16:52:28 -06004627 test.Submit0(test.cba);
4628 test.Submit0(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004629 m_device->wait(); // DeviceWaitIdle, clearing the field for the next subcase
John Zulauf6df2d5c2022-05-28 13:02:21 -06004630
    // Submit A and B on different queues. Since no semaphore is used between the queues, CB B hazards asynchronously with
    // CB A, with buffer A being read and written on independent queues.
4633 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004634 test.Submit0(test.cba);
4635 test.Submit1(test.cbb);
John Zulauf6df2d5c2022-05-28 13:02:21 -06004636 m_errorMonitor->VerifyFound();
4637
4638 // Set up the semaphore for the next two cases
John Zulauf6df2d5c2022-05-28 13:02:21 -06004639
4640 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004641
    // Submit A and B on different queues, with an ineffectual semaphore. The wait mask is BOTTOM_OF_PIPE (effectively
    // empty), thus nothing in CB B is in the second execution scope of the waited signal.
4644 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufb66ee052022-06-10 16:52:28 -06004645 test.Submit0Signal(test.cba);
John Zulaufc55f4702022-07-15 12:16:34 -06004646 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT); // wait mask is BOTTOM, s.t. this is a wait-for-nothing.
John Zulauf6df2d5c2022-05-28 13:02:21 -06004647 m_errorMonitor->VerifyFound();
4648
    // Since the second submit failed, it was skipped, so we can try again without having to DeviceWaitIdle
John Zulaufb66ee052022-06-10 16:52:28 -06004650 // Include transfers in the second execution scope of the waited signal, s.t. the PipelineBarrier in CB B can chain with it.
4651 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_TRANSFER_BIT); //
John Zulaufd060c3f2022-06-08 16:00:46 -06004652
4653 m_device->wait();
4654
    // Submit A and then C to verify the second scope of the semaphore wait
John Zulaufb66ee052022-06-10 16:52:28 -06004656 test.Submit0Signal(test.cba);
4657 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
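    // CB C has no pipeline barrier, so this case relies on the semaphore wait alone: with TRANSFER in the
    // wait's second execution scope, CB C's write to buffer A is ordered after CB A's read signaled on queue 0.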
John Zulaufd060c3f2022-06-08 16:00:46 -06004658
4659 m_device->wait();
4660
4661 // ... and again on the same queue
John Zulaufb66ee052022-06-10 16:52:28 -06004662 test.Submit0Signal(test.cba);
4663 test.Submit0Wait(test.cbc, VK_PIPELINE_STAGE_TRANSFER_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004664
4665 m_device->wait();
John Zulauf6df2d5c2022-05-28 13:02:21 -06004666}
John Zulaufb66ee052022-06-10 16:52:28 -06004667
4668TEST_F(VkSyncValTest, SyncQSBufferEvents) {
4669 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4670 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4671
4672 QSTestContext test(m_device);
4673 if (!test.Valid()) {
4674 printf("%s Test requires at least 2 TRANSFER capable queues in the same queue_family. Skipped.\n", kSkipPrefix);
4675 return;
4676 }
4677
    // Command Buffer A reads from buffer A and writes to buffer B
4679 test.BeginA();
4680 test.CopyAToB();
4681 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4682 test.End();
4683
    // Command Buffer B reads from buffer C and writes to buffer A, but has a wait to protect the write to A when
4685 // executed on the same queue, given that commands in "queue submission order" are within the first scope of the barrier.
4686 test.BeginB();
4687
    // Use the barrier to clean up the WAR, which will work for command buffers earlier in queue submission order, or with
4689 // correct semaphore operations between queues.
4690 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4691 test.CopyCToA();
4692 test.End();
4693
4694 // Command Buffer C merges the operations from A and B, to ensure the set/wait is correct.
    // reads from buffer A and writes to buffer B
    // reads from buffer C and writes to buffer A, with a set/wait event (and buffer barrier) protecting the write to A
4697 test.BeginC();
4698 test.CopyAToB();
4699 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4700 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4701 test.CopyCToA();
4702 test.End();
4703
4704 test.Submit0(test.cba);
4705 test.Submit0(test.cbb);
4706
4707 // Ensure that the wait doesn't apply to async queues
4708 m_device->wait();
4709 test.Submit0(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004710 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ");
4711 test.Submit1(test.cbb);
4712 m_errorMonitor->VerifyFound();
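    // Events cannot synchronize work between queues, so the set/wait pair in these command buffers does not
    // order CB B's write on queue 1 against CB A's read on queue 0, hence the racing-write report.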
4713
    // Ensure that the wait doesn't apply to accesses on other synchronized queues
John Zulaufb66ee052022-06-10 16:52:28 -06004715 m_device->wait();
4716
4717 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004718 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004719 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004720 m_errorMonitor->VerifyFound();
4721
    // Need to have a successful signal wait to get the semaphore in a usable state.
John Zulaufb66ee052022-06-10 16:52:28 -06004723 test.BeginC();
4724 test.End();
John Zulaufc55f4702022-07-15 12:16:34 -06004725 test.Submit1Wait(test.cbc, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004726 m_device->wait();
4727
4728 // Next ensure that accesses from other queues aren't included in the first scope
4729 test.BeginA();
4730 test.CopyAToB();
4731 test.End();
4732
4733 test.BeginB();
4734 test.SetEvent(VK_PIPELINE_STAGE_TRANSFER_BIT);
4735 test.WaitEventBufferTransfer(test.buffer_a, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
4736 test.CopyCToA();
4737 test.End();
4738
4739 test.Submit0Signal(test.cba);
John Zulaufb66ee052022-06-10 16:52:28 -06004740 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004741 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulaufb66ee052022-06-10 16:52:28 -06004742 m_errorMonitor->VerifyFound();
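    // Here the set and wait are both recorded in CB B, so the event's first scope only contains CB B's own
    // prior commands; CB A's read of buffer A on queue 0 is not covered, and the BOTTOM_OF_PIPE semaphore wait
    // adds no useful ordering, so the write-after-read is reported.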
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004743
4744 m_device->wait();
John Zulaufb66ee052022-06-10 16:52:28 -06004745}
John Zulauf46f5d6b2022-06-30 12:38:34 -06004746
4747TEST_F(VkSyncValTest, SyncQSOBarrierHazard) {
4748 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4749 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4750
4751 QSTestContext test(m_device);
4752 if (!test.Valid()) {
4753 GTEST_SKIP() << "Test requires at least 2 TRANSFER capable queues in the same queue_family.";
4754 }
4755
4756 VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
4757 VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
4758 auto image_ci = VkImageObj::ImageCreateInfo2D(128, 128, 1, 1, format, usage, VK_IMAGE_TILING_OPTIMAL);
4759
4760 VkImageObj image_a(m_device);
4761 image_a.Init(image_ci);
4762 ASSERT_TRUE(image_a.initialized());
4763 image_a.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4764
4765 VkImageObj image_b(m_device);
4766 image_b.Init(image_ci);
4767 ASSERT_TRUE(image_b.initialized());
4768 image_b.SetLayout(VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_GENERAL);
4769
4770 VkImageSubresourceLayers all_layers{VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1};
4771 VkOffset3D zero_offset{0, 0, 0};
4772 VkExtent3D full_extent{128, 128, 1}; // <-- image type is 2D
4773 VkImageCopy full_region = {all_layers, zero_offset, all_layers, zero_offset, full_extent};
4774
4775 test.BeginA();
4776 test.CopyGeneral(image_a, image_b, full_region);
4777 test.End();
4778
4779 test.BeginB();
4780 image_a.ImageMemoryBarrier(test.current_cb, VK_IMAGE_ASPECT_COLOR_BIT, VK_ACCESS_NONE, VK_ACCESS_NONE,
John Zulaufc55f4702022-07-15 12:16:34 -06004781 VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, VK_PIPELINE_STAGE_TRANSFER_BIT,
4782 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004783 test.End();
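    // The image barrier above performs a layout transition to ATTACHMENT_OPTIMAL, which counts as a write to
    // image_a; that write is what hazards against CB A's transfer read of image_a in the cases below.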
4784
4785 // We're going to do the copy first, then use the skip on fail, to test three different ways...
4786 test.Submit0Signal(test.cba);
4787
4788 // First asynchronously fail -- the pipeline barrier in B shouldn't work on queue 1
4789 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE-RACING-READ ");
4790 test.Submit1(test.cbb);
4791 m_errorMonitor->VerifyFound();
4792
4793 // Next synchronously fail -- the pipeline barrier in B shouldn't work on queue 1
4794 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_READ");
John Zulaufc55f4702022-07-15 12:16:34 -06004795 test.Submit1Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
John Zulauf46f5d6b2022-06-30 12:38:34 -06004796 m_errorMonitor->VerifyFound();
4797
4798 // Then prove qso works (note that with the failure, the semaphore hasn't been waited, nor the layout changed)
John Zulaufc55f4702022-07-15 12:16:34 -06004799 test.Submit0Wait(test.cbb, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
Jeremy Gebben99f5d542022-08-01 09:46:56 -06004800
4801 m_device->wait();
John Zulauf46f5d6b2022-06-30 12:38:34 -06004802}
John Zulauf2f5947d2022-07-27 15:36:31 -06004803
4804TEST_F(VkSyncValTest, SyncQSRenderPass) {
4805 ASSERT_NO_FATAL_FAILURE(InitSyncValFramework(true)); // Enable QueueSubmit validation
4806 ASSERT_NO_FATAL_FAILURE(InitState(nullptr, nullptr, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT));
4807 if (IsPlatform(kNexusPlayer)) {
4808 printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
4809 return;
4810 }
4811
John Zulauf2f5947d2022-07-27 15:36:31 -06004812 CreateRenderPassHelper rp_helper(m_device);
4813 rp_helper.InitAllAttachmentsToLayoutGeneral();
4814
4815 rp_helper.InitState();
4816 rp_helper.InitAttachmentLayouts(); // Quiet any CoreChecks ImageLayout complaints
4817 m_device->wait(); // and quiesce the system
4818
4819 // The dependency protects the input attachment but not the color attachment
4820 rp_helper.subpass_dep.push_back({VK_SUBPASS_EXTERNAL, 0, VK_PIPELINE_STAGE_TRANSFER_BIT,
4821 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_ACCESS_TRANSFER_WRITE_BIT,
4822 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, 0U});
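    // Because the dependency's dstAccessMask only includes COLOR_ATTACHMENT_READ, the color attachment write
    // performed at vkCmdBeginRenderPass is not in its second access scope, producing the WRITE_AFTER_WRITE
    // against the transfer clear reported below.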
4823
4824 rp_helper.InitRenderPass();
4825 rp_helper.InitFramebuffer();
4826 rp_helper.InitBeginInfo();
4827
4828 VkCommandBufferObj cb0(m_device, m_commandPool);
4829 VkCommandBufferObj cb1(m_device, m_commandPool);
4830
4831 auto do_begin_rp = [&rp_helper](VkCommandBufferObj& cb_obj) { cb_obj.BeginRenderPass(rp_helper.render_pass_begin); };
4832
4833 auto do_clear = [&rp_helper](VkCommandBufferObj& cb_obj) {
4834 VkImageSubresourceRange full_subresource_range{VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1};
4835 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_input->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4836 &full_subresource_range);
4837 vk::CmdClearColorImage(cb_obj.handle(), rp_helper.image_color->handle(), VK_IMAGE_LAYOUT_GENERAL, &rp_helper.ccv, 1,
4838 &full_subresource_range);
4839 };
4840
4841 // Single renderpass barrier (sanity check)
4842 cb0.begin();
4843 do_clear(cb0);
John Zulauf2f5947d2022-07-27 15:36:31 -06004844 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4845 do_begin_rp(cb0);
4846 m_errorMonitor->VerifyFound();
4847 // No "end render pass" as the begin fails
4848
John Zulauf2f5947d2022-07-27 15:36:31 -06004849 cb0.end();
4850 cb0.reset();
4851
    // Inter-CB detection (dual command buffers): the load is safe, the clears' hazard is reported at submit time
4853 cb0.begin();
4854 do_clear(cb0);
4855 cb0.end();
4856
4857 cb1.begin();
4858 do_begin_rp(cb1);
4859 cb1.EndRenderPass();
4860 cb1.end();
4861
4862 auto submit2 = lvl_init_struct<VkSubmitInfo>();
4863 VkCommandBuffer two_cbs[2] = {cb0.handle(), cb1.handle()};
4864 submit2.commandBufferCount = 2;
4865 submit2.pCommandBuffers = two_cbs;
John Zulauf2f5947d2022-07-27 15:36:31 -06004866 m_errorMonitor->SetDesiredFailureMsg(VK_DEBUG_REPORT_ERROR_BIT_EXT, "SYNC-HAZARD-WRITE_AFTER_WRITE");
4867 vk::QueueSubmit(m_device->m_queue, 1, &submit2, VK_NULL_HANDLE);
4868 m_errorMonitor->VerifyFound();
4869}