Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 1 | /* Copyright (c) 2015-2022 The Khronos Group Inc. |
| 2 | * Copyright (c) 2015-2022 Valve Corporation |
| 3 | * Copyright (c) 2015-2022 LunarG, Inc. |
| 4 | * Copyright (C) 2015-2022 Google Inc. |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 5 | * |
| 6 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 7 | * you may not use this file except in compliance with the License. |
| 8 | * You may obtain a copy of the License at |
| 9 | * |
| 10 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 11 | * |
| 12 | * Unless required by applicable law or agreed to in writing, software |
| 13 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | * See the License for the specific language governing permissions and |
| 16 | * limitations under the License. |
| 17 | * |
| 18 | * Author: Chris Forbes <chrisf@ijw.co.nz> |
sfricke-samsung | 962cad9 | 2021-04-13 00:46:29 -0700 | [diff] [blame] | 19 | * |
| 20 | * The Shader Validation file is in charge of taking the Shader Module data and validating it |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 21 | */ |
| 22 | #ifndef VULKAN_SHADER_VALIDATION_H |
| 23 | #define VULKAN_SHADER_VALIDATION_H |
| 24 | |
Petr Kraus | 6c4bdce | 2019-08-27 17:35:01 +0200 | [diff] [blame] | 25 | #include <cstdlib> |
John Zulauf | 14c355b | 2019-06-27 16:09:37 -0600 | [diff] [blame] | 26 | |
Petr Kraus | 6c4bdce | 2019-08-27 17:35:01 +0200 | [diff] [blame] | 27 | #include "vulkan/vulkan.h" |
Mike Schuchardt | b61f9c9 | 2019-07-01 15:14:46 -0700 | [diff] [blame] | 28 | #include <generated/spirv_tools_commit_id.h> |
sfricke-samsung | 962cad9 | 2021-04-13 00:46:29 -0700 | [diff] [blame] | 29 | #include "shader_module.h" |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 30 | #include "vk_layer_utils.h" |
Chris Forbes | 47567b7 | 2017-06-09 12:09:45 -0700 | [diff] [blame] | 31 | |
Jeremy Gebben | 5d97074 | 2021-05-31 16:04:14 -0600 | [diff] [blame] | 32 | struct DeviceFeatures; |
| 33 | struct DeviceExtensions; |
| 34 | |
// Static per-stage attributes consulted during shader validation; `stage`
// identifies which pipeline shader stage the entry describes.
struct shader_stage_attributes {
    char const *const name;    // human-readable stage name (presumably used in error messages — confirm at use sites)
    bool arrayed_input;        // whether this stage's input interface variables are arrayed — confirm at use sites
    bool arrayed_output;       // whether this stage's output interface variables are arrayed — confirm at use sites
    VkShaderStageFlags stage;  // the shader stage this entry describes
};
| 41 | |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 42 | class ValidationCache { |
Petr Kraus | 4ed81e3 | 2019-09-02 23:41:19 +0200 | [diff] [blame] | 43 | public: |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 44 | static VkValidationCacheEXT Create(VkValidationCacheCreateInfoEXT const *pCreateInfo) { |
| 45 | auto cache = new ValidationCache(); |
| 46 | cache->Load(pCreateInfo); |
| 47 | return VkValidationCacheEXT(cache); |
| 48 | } |
| 49 | |
| 50 | void Load(VkValidationCacheCreateInfoEXT const *pCreateInfo) { |
Cort Stratton | 77955d8 | 2018-02-01 23:14:50 -0800 | [diff] [blame] | 51 | const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE; |
| 52 | auto size = headerSize; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 53 | if (!pCreateInfo->pInitialData || pCreateInfo->initialDataSize < size) return; |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 54 | |
| 55 | uint32_t const *data = (uint32_t const *)pCreateInfo->pInitialData; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 56 | if (data[0] != size) return; |
| 57 | if (data[1] != VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT) return; |
Cort Stratton | 77955d8 | 2018-02-01 23:14:50 -0800 | [diff] [blame] | 58 | uint8_t expected_uuid[VK_UUID_SIZE]; |
| 59 | Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, expected_uuid); |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 60 | if (memcmp(&data[2], expected_uuid, VK_UUID_SIZE) != 0) return; // different version |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 61 | |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 62 | data = (uint32_t const *)(reinterpret_cast<uint8_t const *>(data) + headerSize); |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 63 | |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 64 | auto guard = WriteLock(); |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 65 | for (; size < pCreateInfo->initialDataSize; data++, size += sizeof(uint32_t)) { |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 66 | good_shader_hashes_.insert(*data); |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 67 | } |
| 68 | } |
| 69 | |
| 70 | void Write(size_t *pDataSize, void *pData) { |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 71 | const auto headerSize = 2 * sizeof(uint32_t) + VK_UUID_SIZE; // 4 bytes for header size + 4 bytes for version number + UUID |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 72 | if (!pData) { |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 73 | *pDataSize = headerSize + good_shader_hashes_.size() * sizeof(uint32_t); |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 74 | return; |
| 75 | } |
| 76 | |
| 77 | if (*pDataSize < headerSize) { |
| 78 | *pDataSize = 0; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 79 | return; // Too small for even the header! |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 80 | } |
| 81 | |
| 82 | uint32_t *out = (uint32_t *)pData; |
| 83 | size_t actualSize = headerSize; |
| 84 | |
| 85 | // Write the header |
| 86 | *out++ = headerSize; |
| 87 | *out++ = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT; |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 88 | Sha1ToVkUuid(SPIRV_TOOLS_COMMIT_ID, reinterpret_cast<uint8_t *>(out)); |
| 89 | out = (uint32_t *)(reinterpret_cast<uint8_t *>(out) + VK_UUID_SIZE); |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 90 | |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 91 | { |
| 92 | auto guard = ReadLock(); |
| 93 | for (auto it = good_shader_hashes_.begin(); it != good_shader_hashes_.end() && actualSize < *pDataSize; |
| 94 | it++, out++, actualSize += sizeof(uint32_t)) { |
| 95 | *out = *it; |
| 96 | } |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 97 | } |
| 98 | |
| 99 | *pDataSize = actualSize; |
| 100 | } |
| 101 | |
| 102 | void Merge(ValidationCache const *other) { |
Jeremy Gebben | da5ddff | 2022-01-07 10:55:03 -0700 | [diff] [blame] | 103 | // self-merging is invalid, but avoid deadlock below just in case. |
| 104 | if (other == this) { |
| 105 | return; |
| 106 | } |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 107 | auto other_guard = other->ReadLock(); |
| 108 | auto guard = WriteLock(); |
| 109 | good_shader_hashes_.reserve(good_shader_hashes_.size() + other->good_shader_hashes_.size()); |
| 110 | for (auto h : other->good_shader_hashes_) good_shader_hashes_.insert(h); |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 111 | } |
| 112 | |
| 113 | static uint32_t MakeShaderHash(VkShaderModuleCreateInfo const *smci); |
| 114 | |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 115 | bool Contains(uint32_t hash) { |
| 116 | auto guard = ReadLock(); |
| 117 | return good_shader_hashes_.count(hash) != 0; |
| 118 | } |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 119 | |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 120 | void Insert(uint32_t hash) { |
| 121 | auto guard = WriteLock(); |
| 122 | good_shader_hashes_.insert(hash); |
| 123 | } |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 124 | |
Petr Kraus | 4ed81e3 | 2019-09-02 23:41:19 +0200 | [diff] [blame] | 125 | private: |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 126 | ValidationCache() {} |
| 127 | ReadLockGuard ReadLock() const { return ReadLockGuard(lock_); } |
| 128 | WriteLockGuard WriteLock() { return WriteLockGuard(lock_); } |
| 129 | |
Petr Kraus | 6c4bdce | 2019-08-27 17:35:01 +0200 | [diff] [blame] | 130 | void Sha1ToVkUuid(const char *sha1_str, uint8_t *uuid) { |
| 131 | // Convert sha1_str from a hex string to binary. We only need VK_UUID_SIZE bytes of |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 132 | // output, so pad with zeroes if the input string is shorter than that, and truncate |
| 133 | // if it's longer. |
Peter Griffin | 3925f0b | 2020-04-09 09:31:25 +0100 | [diff] [blame] | 134 | #if defined(__GNUC__) && (__GNUC__ > 8) |
| 135 | #pragma GCC diagnostic push |
| 136 | #pragma GCC diagnostic ignored "-Wstringop-truncation" |
| 137 | #endif |
Petr Kraus | 6c4bdce | 2019-08-27 17:35:01 +0200 | [diff] [blame] | 138 | char padded_sha1_str[2 * VK_UUID_SIZE + 1] = {}; // 2 hex digits == 1 byte |
| 139 | std::strncpy(padded_sha1_str, sha1_str, 2 * VK_UUID_SIZE); |
Peter Griffin | 3925f0b | 2020-04-09 09:31:25 +0100 | [diff] [blame] | 140 | #if defined(__GNUC__) && (__GNUC__ > 8) |
| 141 | #pragma GCC diagnostic pop |
| 142 | #endif |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 143 | for (uint32_t i = 0; i < VK_UUID_SIZE; ++i) { |
Petr Kraus | 6c4bdce | 2019-08-27 17:35:01 +0200 | [diff] [blame] | 144 | const char byte_str[] = {padded_sha1_str[2 * i + 0], padded_sha1_str[2 * i + 1], '\0'}; |
| 145 | uuid[i] = static_cast<uint8_t>(std::strtoul(byte_str, nullptr, 16)); |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 146 | } |
Cort Stratton | b614d33 | 2017-11-22 16:05:49 -0800 | [diff] [blame] | 147 | } |
Jeremy Gebben | e2b2492 | 2021-12-22 12:07:16 -0700 | [diff] [blame] | 148 | |
| 149 | // hashes of shaders that have passed validation before, and can be skipped. |
| 150 | // we don't store negative results, as we would have to also store what was |
| 151 | // wrong with them; also, we expect they will get fixed, so we're less |
| 152 | // likely to see them again. |
| 153 | layer_data::unordered_set<uint32_t> good_shader_hashes_; |
| 154 | mutable ReadWriteLock lock_; |
Chris Forbes | 9a61e08 | 2017-07-24 15:35:29 -0700 | [diff] [blame] | 155 | }; |
| 156 | |
// Chooses the SPIRV-Tools target environment for the given Vulkan api_version;
// spirv_1_4 presumably selects a SPIR-V 1.4-capable environment (e.g. when
// VK_KHR_spirv_1_4 is enabled) — confirm in the implementation.
spv_target_env PickSpirvEnv(uint32_t api_version, bool spirv_1_4);

// Adjusts SPIRV-Tools validator options according to the enabled device
// extensions and features (implemented in the corresponding .cpp file).
void AdjustValidatorOptions(const DeviceExtensions &device_extensions, const DeviceFeatures &enabled_features,
                            spvtools::ValidatorOptions &options);
| 161 | |
Dave Houlton | a9df0ce | 2018-02-07 10:51:23 -0700 | [diff] [blame] | 162 | #endif // VULKAN_SHADER_VALIDATION_H |