// Copyright 2018 The Clspv Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "clang/AST/RecordLayout.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/CodeGen/CodeGenAction.h"
#include "clang/Frontend/CompilerInstance.h"
#include "clang/Frontend/FrontendPluginRegistry.h"
#include "clang/Frontend/TextDiagnosticPrinter.h"

#include "llvm/Support/Debug.h"

#include "clspv/Option.h"

#include "FrontendPlugin.h"

#include <unordered_set>

using namespace clang;

namespace {

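// Counter used to give each implicitly created clustered-argument record (see
// HandleTopLevelDecl below) a unique name.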
static uint32_t kClusteredCount = 0;

struct ExtraValidationConsumer final : public ASTConsumer {
private:
  CompilerInstance &Instance;
  llvm::StringRef InFile;

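  // Buffer layout rules to validate against: Vulkan uniform buffers (UBO)
  // have stricter alignment requirements than storage buffers (SSBO).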
  enum Layout { UBO, SSBO };

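  // Custom diagnostics emitted by this consumer. The enum values index into
  // CustomDiagnosticsIDMap, which is populated in the constructor.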
  enum CustomDiagnosticType {
    CustomDiagnosticVectorsMoreThan4Elements,
    CustomDiagnosticUnsupportedKernelParameter,
    CustomDiagnosticVoidPointer,
    CustomDiagnosticUnalignedScalar,
    CustomDiagnosticUnalignedVec2,
    CustomDiagnosticUnalignedVec4,
    CustomDiagnosticUBOUnalignedArray,
    CustomDiagnosticUBOUnalignedStruct,
    CustomDiagnosticSmallStraddle,
    CustomDiagnosticLargeStraddle,
    CustomDiagnosticUnalignedStructMember,
    CustomDiagnosticUBORestrictedSize,
    CustomDiagnosticUBORestrictedStruct,
    CustomDiagnosticUBOArrayStride,
    CustomDiagnosticLocationInfo,
    CustomDiagnosticSSBOUnalignedArray,
    CustomDiagnosticSSBOUnalignedStruct,
    CustomDiagnosticOverloadedKernel,
    CustomDiagnosticStructContainsPointer,
    CustomDiagnosticRecursiveStruct,
    CustomDiagnosticPushConstantSizeExceeded,
    CustomDiagnosticPushConstantContainsArray,
    CustomDiagnosticUnsupported16BitStorage,
    CustomDiagnosticUnsupported8BitStorage,
    CustomDiagnosticTotal
  };
  std::vector<unsigned> CustomDiagnosticsIDMap;

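  // Maps an OpenCL address space to the storage class clspv will use for the
  // corresponding kernel argument: constant arguments may be placed in a UBO
  // when clspv::Option::ConstantArgsInUniformBuffer() is set, otherwise an
  // SSBO is used.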
  clspv::Option::StorageClass ConvertToStorageClass(clang::LangAS aspace) {
    switch (aspace) {
    case LangAS::opencl_constant:
      if (clspv::Option::ConstantArgsInUniformBuffer()) {
        return clspv::Option::StorageClass::kUBO;
      } else {
        return clspv::Option::StorageClass::kSSBO;
      }
    case LangAS::opencl_global:
    default:
      return clspv::Option::StorageClass::kSSBO;
    }
  }

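  // Returns true if |QT| is, or transitively contains (through pointers,
  // arrays, vectors, or struct fields), a scalar type of bit width |width|.
  // Used to decide whether 8-bit or 16-bit storage capabilities are required.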
  bool ContainsSizedType(QualType QT, uint32_t width) {
    auto canonical = QT.getCanonicalType();
    if (auto *BT = dyn_cast<BuiltinType>(canonical)) {
      switch (BT->getKind()) {
      case BuiltinType::UShort:
      case BuiltinType::Short:
      case BuiltinType::Half:
      case BuiltinType::Float16:
        return width == 16;
      case BuiltinType::UChar:
      case BuiltinType::Char_U:
      case BuiltinType::SChar:
      case BuiltinType::Char_S:
        return width == 8;
      default:
        return false;
      }
    } else if (auto *PT = dyn_cast<PointerType>(canonical)) {
      return ContainsSizedType(PT->getPointeeType(), width);
    } else if (auto *AT = dyn_cast<ArrayType>(canonical)) {
      return ContainsSizedType(AT->getElementType(), width);
    } else if (auto *VT = dyn_cast<VectorType>(canonical)) {
      return ContainsSizedType(VT->getElementType(), width);
    } else if (auto *RT = dyn_cast<RecordType>(canonical)) {
      for (auto field_decl : RT->getDecl()->fields()) {
        if (ContainsSizedType(field_decl->getType(), width))
          return true;
      }
    }

    return false;
  }

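  // Returns true if |QT| is, or transitively contains, a pointer type.
  // Structures passed to kernels may not contain pointers.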
  bool ContainsPointerType(QualType QT) {
    auto canonical = QT.getCanonicalType();
    if (canonical->isPointerType()) {
      return true;
    } else if (auto *AT = dyn_cast<ArrayType>(canonical)) {
      return ContainsPointerType(AT->getElementType());
    } else if (auto *RT = dyn_cast<RecordType>(canonical)) {
      for (auto field_decl : RT->getDecl()->fields()) {
        if (ContainsPointerType(field_decl->getType()))
          return true;
      }
    }

    return false;
  }

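  // Returns true if |QT| is, or transitively contains, an array type. Arrays
  // are currently not allowed in push constant arguments.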
  bool ContainsArrayType(QualType QT) {
    auto canonical = QT.getCanonicalType();
    if (auto *PT = dyn_cast<PointerType>(canonical)) {
      return ContainsArrayType(PT->getPointeeType());
    } else if (isa<ArrayType>(canonical)) {
      return true;
    } else if (auto *RT = dyn_cast<RecordType>(canonical)) {
      for (auto field_decl : RT->getDecl()->fields()) {
        if (ContainsArrayType(field_decl->getType()))
          return true;
      }
    }

    return false;
  }

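  // Returns true if |QT| is, or contains, a recursive (self-referential)
  // record type. |seen| tracks the record types on the current traversal path.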
  bool IsRecursiveType(QualType QT, llvm::DenseSet<const Type *> *seen) {
    auto canonical = QT.getCanonicalType();
    if (canonical->isRecordType() &&
        !seen->insert(canonical.getTypePtr()).second) {
      return true;
    }

    if (auto *PT = dyn_cast<PointerType>(canonical)) {
      return IsRecursiveType(PT->getPointeeType(), seen);
    } else if (auto *AT = dyn_cast<ArrayType>(canonical)) {
      return IsRecursiveType(AT->getElementType(), seen);
    } else if (auto *RT = dyn_cast<RecordType>(canonical)) {
      for (auto field_decl : RT->getDecl()->fields()) {
        if (IsRecursiveType(field_decl->getType(), seen))
          return true;
      }
    }

    seen->erase(canonical.getTypePtr());
    return false;
  }

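  // Returns true if |QT| can be represented by clspv. Emits a diagnostic and
  // returns false otherwise (e.g. for pointer-to-void, vectors with more than
  // four elements when long vectors are not enabled, or recursive structs).
  // |IsKernelParameter| enables the stricter checks that apply to kernel
  // argument types. For example, a kernel parameter declared as
  // "global void *data" is rejected with CustomDiagnosticVoidPointer.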
  bool IsSupportedType(QualType QT, SourceRange SR, bool IsKernelParameter) {
    auto *Ty = QT.getTypePtr();

    // First check if we have a pointer type.
    if (Ty->isPointerType()) {
      const Type *pointeeTy = Ty->getPointeeType().getTypePtr();
      if (pointeeTy && pointeeTy->isVoidType()) {
        // We don't support void pointers.
        Instance.getDiagnostics().Report(
            SR.getBegin(), CustomDiagnosticsIDMap[CustomDiagnosticVoidPointer]);
        return false;
      }
      // Otherwise check recursively.
      return IsSupportedType(Ty->getPointeeType(), SR, IsKernelParameter);
    }

    const auto &canonicalType = QT.getCanonicalType();
    if (auto *VT = llvm::dyn_cast<ExtVectorType>(canonicalType)) {
      // Vectors with more than 4 elements are only supported in some
      // circumstances: with long-vector support enabled they are allowed
      // everywhere except as kernel parameters; otherwise they are rejected
      // outright.
      if (4 < VT->getNumElements()) {
        if (clspv::Option::LongVectorSupport()) {
          if (IsKernelParameter) {
            Report(CustomDiagnosticUnsupportedKernelParameter, SR, SR);
            return false;
          }
        } else {
          Report(CustomDiagnosticVectorsMoreThan4Elements, SR, SR);
          return false;
        }
      }

      return true;
    }

    if (auto *RT = llvm::dyn_cast<RecordType>(canonicalType)) {
      // Do not allow recursive struct definitions.
      llvm::DenseSet<const Type *> seen;
      if (IsRecursiveType(canonicalType, &seen)) {
        Instance.getDiagnostics().Report(
            SR.getBegin(),
            CustomDiagnosticsIDMap[CustomDiagnosticRecursiveStruct]);
        return false;
      }

      // To avoid infinite recursion, first verify that the record is not
      // recursive and then that its fields are supported.
      for (auto *field_decl : RT->getDecl()->fields()) {
        if (!IsSupportedType(field_decl->getType(), SR, IsKernelParameter)) {
          return false;
        }
      }

      return true;
    }

    if (auto *AT = llvm::dyn_cast<ArrayType>(canonicalType)) {
      return IsSupportedType(AT->getElementType(), SR, IsKernelParameter);
    }

    // For function prototypes, recurse on return type and parameter types.
    if (auto *FT = llvm::dyn_cast<FunctionProtoType>(canonicalType)) {
      IsKernelParameter =
          IsKernelParameter || (FT->getCallConv() == CC_OpenCLKernel);
      for (auto param : FT->getParamTypes()) {
        if (!IsSupportedType(param, SR, IsKernelParameter)) {
          return false;
        }
      }

      if (!IsSupportedType(FT->getReturnType(), SR, IsKernelParameter)) {
        return false;
      }

      return true;
    }

    if (QT->isBuiltinType()) {
      return true;
    }

#ifndef NDEBUG
    llvm::dbgs() << "IsSupportedType lacks support for QualType: "
                 << QT.getAsString() << '\n';
#endif
    llvm_unreachable("Type not covered by IsSupportedType.");
  }

  // Report a diagnostic using |diag|. If |arg_range| and |specific_range|
  // differ, also issue a note with the specific location of the error.
  void Report(const CustomDiagnosticType &diag, SourceRange arg_range,
              SourceRange specific_range) {
    Instance.getDiagnostics().Report(arg_range.getBegin(),
                                     CustomDiagnosticsIDMap[diag]);
    if (arg_range != specific_range) {
      Instance.getDiagnostics().Report(
          specific_range.getBegin(),
          CustomDiagnosticsIDMap[CustomDiagnosticLocationInfo]);
    }
  }

  // Returns the alignment of |QT| to satisfy |layout|'s rules.
  uint64_t GetAlignment(const QualType QT, const Layout &layout,
                        const ASTContext &context) const {
    const auto canonical = QT.getCanonicalType();
    uint64_t alignment = context.getTypeAlignInChars(canonical).getQuantity();
    if (layout == UBO &&
        (canonical->isRecordType() || canonical->isArrayType())) {
      return llvm::alignTo(alignment, 16);
    }
    return alignment;
  }

  // Returns true if |QT| satisfies the layout rules of |layout|. For uniform
  // buffer layouts, refer to 14.5.4 in the Vulkan specification.
  bool IsSupportedLayout(QualType QT, uint64_t offset, const Layout &layout,
                         ASTContext &context, SourceRange arg_range,
                         SourceRange specific_range) {
    const auto canonical = QT.getCanonicalType();
    if (canonical->isScalarType()) {
      if (!IsSupportedScalarLayout(canonical, offset, layout, context,
                                   arg_range, specific_range))
        return false;
    } else if (canonical->isExtVectorType()) {
      if (!IsSupportedVectorLayout(canonical, offset, layout, context,
                                   arg_range, specific_range))
        return false;
    } else if (canonical->isArrayType()) {
      if (!IsSupportedArrayLayout(canonical, offset, layout, context, arg_range,
                                  specific_range))
        return false;
    } else if (canonical->isRecordType()) {
      if (!IsSupportedRecordLayout(canonical, offset, layout, context,
                                   arg_range, specific_range))
        return false;
    }

    // TODO(alan-baker): Find a way to avoid this restriction.
    // Don't allow padding. This prevents structs like:
    // struct {
    //   int x[2];
    //   int y __attribute((aligned(16)));
    // };
    //
    // This would map in LLVM to { [2 x i32], [8 x i8], i32, [12 x i8] }.
    // There is no easy way to manipulate the padding after the array to
    // satisfy the standard Uniform buffer layout rules in this case. The usual
    // trick is replacing the i8 arrays with an i32 element, but the i32 would
    // still be laid out too close to the array.
    const auto type_size = context.getTypeSizeInChars(canonical).getQuantity();
    const auto type_align = GetAlignment(canonical, layout, context);
    if (layout == UBO && (type_size % type_align != 0)) {
      Report(CustomDiagnosticUBORestrictedSize, arg_range, specific_range);
      return false;
    }

    return true;
  }

  bool IsSupportedScalarLayout(QualType QT, uint64_t offset,
                               const Layout & /*layout*/, ASTContext &context,
                               SourceRange arg_range,
                               SourceRange specific_range) {
    // A scalar type of size N has a base alignment of N.
    const unsigned type_size = context.getTypeSizeInChars(QT).getQuantity();
    if (offset % type_size != 0) {
      Report(CustomDiagnosticUnalignedScalar, arg_range, specific_range);
      return false;
    }

    return true;
  }

  bool IsSupportedVectorLayout(QualType QT, uint64_t offset,
                               const Layout &layout, ASTContext &context,
                               SourceRange arg_range,
                               SourceRange specific_range) {
    // 2-component vectors have a base alignment of 2 * (size of element).
    // 3- and 4-component vectors have a base alignment of 4 * (size of
    // element).
    const auto *VT = llvm::cast<VectorType>(QT);
    const auto ele_size =
        context.getTypeSizeInChars(VT->getElementType()).getQuantity();
    if (VT->getNumElements() == 2) {
      if (offset % (ele_size * 2) != 0) {
        Report(CustomDiagnosticUnalignedVec2, arg_range, specific_range);
        return false;
      }
    } else if (offset % (ele_size * 4) != 0) {
      // Other vector sizes cause errors elsewhere.
      Report(CustomDiagnosticUnalignedVec4, arg_range, specific_range);
      return false;
    }

    // Straddling rules:
    // * If total vector size is less than or equal to 16 bytes, the offset
    //   must place the entire vector within the same 16 bytes.
    // * If total vector size is greater than 16 bytes, the offset must be a
    //   multiple of 16.
    const auto size = context.getTypeSizeInChars(QT).getQuantity();
    if (size <= 16 && (offset / 16 != (offset + size - 1) / 16)) {
      Report(CustomDiagnosticSmallStraddle, arg_range, specific_range);
      return false;
    } else if (size > 16 && (offset % 16 != 0)) {
      Report(CustomDiagnosticLargeStraddle, arg_range, specific_range);
      return false;
    }

    return IsSupportedLayout(VT->getElementType(), offset, layout, context,
                             arg_range, specific_range);
  }

  bool IsSupportedArrayLayout(QualType QT, uint64_t offset,
                              const Layout &layout, ASTContext &context,
                              SourceRange arg_range,
                              SourceRange specific_range) {
    // An array has the base alignment of its element type.
    // If the layout is UBO, the alignment is rounded up to a multiple of 16.
    const auto *AT = llvm::cast<ArrayType>(QT);
    const auto element_align =
        GetAlignment(AT->getElementType(), layout, context);
    const auto type_align =
        layout == UBO ? llvm::alignTo(element_align, 16) : element_align;
    if (offset % type_align != 0) {
      auto diag_id = layout == UBO ? CustomDiagnosticUBOUnalignedArray
                                   : CustomDiagnosticSSBOUnalignedArray;
      Report(diag_id, arg_range, specific_range);
      return false;
    }
    if (layout == UBO && !clspv::Option::RelaxedUniformBufferLayout()) {
      // The ArrayStride must be a multiple of the base alignment of the array
      // (i.e. a multiple of 16). This means that the element size must be a
      // multiple of the base alignment of the array.
      const auto element_size =
          context.getTypeSizeInChars(AT->getElementType()).getQuantity();
      if (element_size % type_align != 0) {
        Report(CustomDiagnosticUBOArrayStride, arg_range, specific_range);
        return false;
      }
    }

    return IsSupportedLayout(AT->getElementType(), offset, layout, context,
                             arg_range, specific_range);
  }

  bool IsSupportedRecordLayout(QualType QT, uint64_t offset,
                               const Layout &layout, ASTContext &context,
                               SourceRange arg_range,
                               SourceRange specific_range) {
    // A structure has a base alignment of its largest member. For UBO layouts,
    // alignment is rounded up to a multiple of 16.
    const auto *RT = llvm::cast<RecordType>(QT);
    auto type_alignment = GetAlignment(QT, layout, context);
    if (layout == UBO)
      type_alignment = llvm::alignTo(type_alignment, 16);
    if (offset % type_alignment != 0) {
      auto diag_id = layout == UBO ? CustomDiagnosticUBOUnalignedStruct
                                   : CustomDiagnosticSSBOUnalignedStruct;
      Report(diag_id, arg_range, specific_range);
      return false;
    }

    const auto &record_layout = context.getASTRecordLayout(RT->getDecl());
    const FieldDecl *prev = nullptr;
    for (auto field_decl : RT->getDecl()->fields()) {
      const auto field_type = field_decl->getType();
      const unsigned field_no = field_decl->getFieldIndex();
      const uint64_t field_offset =
          record_layout.getFieldOffset(field_no) / context.getCharWidth();

      // Rules must be checked recursively.
      if (!IsSupportedLayout(field_type, field_offset + offset, layout, context,
                             arg_range, field_decl->getSourceRange())) {
        return false;
      }

      if (prev) {
        const auto prev_canonical = prev->getType().getCanonicalType();
        const uint64_t prev_offset =
            record_layout.getFieldOffset(field_no - 1) / context.getCharWidth();
        const auto prev_size =
            context.getTypeSizeInChars(prev_canonical).getQuantity();
        const auto prev_alignment =
            GetAlignment(prev_canonical, layout, context);
        const auto next_available =
            prev_offset + llvm::alignTo(prev_size, prev_alignment);
        if (prev_canonical->isArrayType() || prev_canonical->isRecordType()) {
          // The next element after an array or struct must be placed on or
          // after the next multiple of the alignment of that array or
          // struct.
          // For UBO layouts, both arrays and structs must be aligned to a
          // multiple of 16 bytes.
          const uint64_t final_align = layout == UBO
                                           ? llvm::alignTo(next_available, 16)
                                           : next_available;
          if (final_align > field_offset) {
            Report(CustomDiagnosticUnalignedStructMember, arg_range,
                   field_decl->getSourceRange());
            return false;
          }
        }
      }

      prev = field_decl;
    }

    return true;
  }

  // This will be used to check the inside of function bodies.
  class DeclVisitor : public RecursiveASTVisitor<DeclVisitor> {
  private:
    ExtraValidationConsumer &consumer;

  public:
    explicit DeclVisitor(ExtraValidationConsumer &VC) : consumer(VC) {}

    // Visits a declaration. Emits a diagnostic and returns false if the
    // declaration represents an unsupported vector value or vector type.
    // Otherwise returns true.
    //
    // Looking at the Decl class hierarchy, it seems ValueDecl and TypeDecl
    // are the only two that might represent an unsupported vector type.
    bool VisitValueDecl(ValueDecl *VD) {
      return consumer.IsSupportedType(VD->getType(), VD->getSourceRange(),
                                      false);
    }
    bool VisitTypeDecl(TypeDecl *TD) {
      QualType DefinedType = TD->getASTContext().getTypeDeclType(TD);
      return consumer.IsSupportedType(DefinedType, TD->getSourceRange(), false);
    }
  };

  DeclVisitor Visitor;
  std::unordered_set<std::string> Kernels;

public:
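  // Registers each custom diagnostic with the DiagnosticsEngine and stores
  // its ID in CustomDiagnosticsIDMap, indexed by CustomDiagnosticType.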
  explicit ExtraValidationConsumer(CompilerInstance &Instance,
                                   llvm::StringRef InFile)
      : Instance(Instance), InFile(InFile),
        CustomDiagnosticsIDMap(CustomDiagnosticTotal), Visitor(*this) {
    auto &DE = Instance.getDiagnostics();

    CustomDiagnosticsIDMap[CustomDiagnosticVectorsMoreThan4Elements] =
        DE.getCustomDiagID(
            DiagnosticsEngine::Error,
            "vectors with more than 4 elements are not supported");
    CustomDiagnosticsIDMap[CustomDiagnosticUnsupportedKernelParameter] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "vectors with more than 4 elements are not "
                           "supported as kernel parameters");
    CustomDiagnosticsIDMap[CustomDiagnosticVoidPointer] = DE.getCustomDiagID(
        DiagnosticsEngine::Error, "pointer-to-void is not supported");
    CustomDiagnosticsIDMap[CustomDiagnosticUnalignedScalar] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "scalar elements must be aligned to their size");
    CustomDiagnosticsIDMap[CustomDiagnosticUnalignedVec2] = DE.getCustomDiagID(
        DiagnosticsEngine::Error,
        "two-component vectors must be aligned to 2 times their element size");
    CustomDiagnosticsIDMap[CustomDiagnosticUnalignedVec4] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "three- and four-component vectors must be aligned "
                           "to 4 times their element size");
    CustomDiagnosticsIDMap[CustomDiagnosticUBOUnalignedArray] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "in an UBO, arrays must be aligned to their element "
                           "alignment, rounded up to a multiple of 16 bytes");
    CustomDiagnosticsIDMap[CustomDiagnosticUBOUnalignedStruct] =
        DE.getCustomDiagID(
            DiagnosticsEngine::Error,
            "in an UBO, structs must be aligned to their "
            "largest element alignment, rounded up to a multiple of "
            "16 bytes");
    CustomDiagnosticsIDMap[CustomDiagnosticSmallStraddle] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "vectors with a total size less than or equal to 16 "
                           "bytes must be placed entirely within a 16 byte "
                           "aligned region");
    CustomDiagnosticsIDMap[CustomDiagnosticLargeStraddle] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "vectors with a total size greater than 16 bytes "
                           "must be aligned to 16 bytes");
    CustomDiagnosticsIDMap[CustomDiagnosticUnalignedStructMember] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "a structure member must not be placed between the "
                           "end of a structure or array and the next multiple "
                           "of the base alignment of that structure or array");
    CustomDiagnosticsIDMap[CustomDiagnosticUBORestrictedSize] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "clspv restriction: UBO element size must be a "
                           "multiple of that element's alignment");
    CustomDiagnosticsIDMap[CustomDiagnosticUBORestrictedStruct] =
        DE.getCustomDiagID(
            DiagnosticsEngine::Error,
            "clspv restriction: UBO structures may not have implicit padding");
    CustomDiagnosticsIDMap[CustomDiagnosticUBOArrayStride] = DE.getCustomDiagID(
        DiagnosticsEngine::Error,
        "clspv restriction: to satisfy UBO ArrayStride restrictions, element "
        "size must be a multiple of array alignment");
    CustomDiagnosticsIDMap[CustomDiagnosticLocationInfo] =
        DE.getCustomDiagID(DiagnosticsEngine::Note, "here");
    CustomDiagnosticsIDMap[CustomDiagnosticSSBOUnalignedArray] =
        DE.getCustomDiagID(
            DiagnosticsEngine::Error,
            "in a SSBO, arrays must be aligned to their element alignment");
    CustomDiagnosticsIDMap[CustomDiagnosticSSBOUnalignedStruct] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "in a SSBO, structs must be aligned to their "
                           "largest element alignment");
    CustomDiagnosticsIDMap[CustomDiagnosticOverloadedKernel] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "kernel functions can't be overloaded");
    CustomDiagnosticsIDMap[CustomDiagnosticStructContainsPointer] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "structures may not contain pointers");
    CustomDiagnosticsIDMap[CustomDiagnosticRecursiveStruct] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "recursive structures are not supported");
    CustomDiagnosticsIDMap[CustomDiagnosticPushConstantSizeExceeded] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "max push constant size exceeded");
    CustomDiagnosticsIDMap[CustomDiagnosticPushConstantContainsArray] =
        DE.getCustomDiagID(
            DiagnosticsEngine::Error,
            "arrays are not supported in push constants currently");
    CustomDiagnosticsIDMap[CustomDiagnosticUnsupported16BitStorage] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "16-bit storage is not supported for "
                           "%select{SSBOs|UBOs|push constants}0");
    CustomDiagnosticsIDMap[CustomDiagnosticUnsupported8BitStorage] =
        DE.getCustomDiagID(DiagnosticsEngine::Error,
                           "8-bit storage is not supported for "
                           "%select{SSBOs|UBOs|push constants}0");
  }

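  // Validates each top-level function declaration: checks return and
  // parameter types, rejects overloaded kernels, validates UBO/SSBO/push
  // constant layouts for kernel arguments, checks 8-/16-bit storage
  // capability requirements, and finally walks the function body for
  // unsupported vector types.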
  virtual bool HandleTopLevelDecl(DeclGroupRef DG) override {
    for (auto *D : DG) {
      if (auto *FD = llvm::dyn_cast<FunctionDecl>(D)) {
        // If the function has a body it means it is not an OpenCL builtin
        // function.
        if (FD->hasBody()) {
          if (!IsSupportedType(FD->getReturnType(),
                               FD->getReturnTypeSourceRange(), false)) {
            return false;
          }

          bool is_opencl_kernel = false;
          if (FD->hasAttrs()) {
            for (auto *attr : FD->attrs()) {
              if (attr->getKind() == attr::Kind::OpenCLKernel) {
                is_opencl_kernel = true;
              }
            }
          }

          if (is_opencl_kernel) {
            if (Kernels.count(FD->getName().str()) != 0) {
              auto srcRange = FD->getSourceRange();
              Report(CustomDiagnosticOverloadedKernel, srcRange, srcRange);
            } else {
              Kernels.insert(FD->getName().str());
            }
          }

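          // When POD kernel arguments are passed as push constants, they are
          // clustered into a single implicit record so that their combined
          // layout and total size can be checked once all parameters have
          // been visited.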
          RecordDecl *clustered_args = nullptr;
          if (is_opencl_kernel && clspv::Option::PodArgsInPushConstants()) {
            clustered_args = FD->getASTContext().buildImplicitRecord(
                "__clspv.clustered_args." + std::to_string(kClusteredCount++));
            clustered_args->startDefinition();
          }
          for (auto *P : FD->parameters()) {
            auto type = P->getType();
            if (!IsSupportedType(P->getOriginalType(), P->getSourceRange(),
                                 is_opencl_kernel)) {
              return false;
            }

            if (is_opencl_kernel && type->isPointerType() &&
                ((type->getPointeeType().getAddressSpace() ==
                  LangAS::opencl_constant) ||
                 (type->getPointeeType().getAddressSpace() ==
                  LangAS::opencl_global))) {
              // The argument will be generated as an array within a block.
              // Generate an array type to check the validity for the generated
              // case.
              Layout layout = SSBO;
              if (clspv::Option::ConstantArgsInUniformBuffer() &&
                  !clspv::Option::Std430UniformBufferLayout() &&
                  type->getPointeeType().getAddressSpace() ==
                      LangAS::opencl_constant) {
                layout = UBO;
              }
              auto array_type = FD->getASTContext().getIncompleteArrayType(
                  type->getPointeeType(), clang::ArrayType::Normal, 0);
              if (!IsSupportedLayout(array_type, 0, layout, FD->getASTContext(),
                                     P->getSourceRange(),
                                     P->getSourceRange())) {
                return false;
              }
            }

            // Check if storage capabilities are supported.
            if (is_opencl_kernel) {
              bool contains_16bit =
                  ContainsSizedType(type.getCanonicalType(), 16);
              bool contains_8bit =
                  ContainsSizedType(type.getCanonicalType(), 8);
              auto sc = clspv::Option::StorageClass::kSSBO;
              if (type->isPointerType()) {
                sc = ConvertToStorageClass(
                    type->getPointeeType().getAddressSpace());
              } else if (clspv::Option::PodArgsInUniformBuffer()) {
                sc = clspv::Option::StorageClass::kUBO;
              } else if (clspv::Option::PodArgsInPushConstants()) {
                sc = clspv::Option::StorageClass::kPushConstant;
              }

              if (type->isPointerType() ||
                  sc != clspv::Option::StorageClass::kSSBO ||
                  !clspv::Option::ClusterPodKernelArgs()) {
                // For clustered pod args, assume we can fall back on
                // type-mangling.
                if (contains_16bit &&
                    !clspv::Option::Supports16BitStorageClass(sc)) {
                  Instance.getDiagnostics().Report(
                      P->getSourceRange().getBegin(),
                      CustomDiagnosticsIDMap
                          [CustomDiagnosticUnsupported16BitStorage])
                      << static_cast<int>(sc);
                }
                if (contains_8bit &&
                    !clspv::Option::Supports8BitStorageClass(sc)) {
                  Instance.getDiagnostics().Report(
                      P->getSourceRange().getBegin(),
                      CustomDiagnosticsIDMap
                          [CustomDiagnosticUnsupported8BitStorage])
                      << static_cast<int>(sc);
                }
              }
            }

            if (is_opencl_kernel && type->isPointerType()) {
              auto pointee_type = type->getPointeeType().getCanonicalType();
              if (ContainsPointerType(pointee_type)) {
                Instance.getDiagnostics().Report(
                    P->getSourceRange().getBegin(),
                    CustomDiagnosticsIDMap
                        [CustomDiagnosticStructContainsPointer]);
                return false;
              }
            }

            if (is_opencl_kernel && !type->isPointerType()) {
              if (clspv::Option::PodArgsInPushConstants()) {
                // Don't allow arrays in push constants currently.
                if (ContainsArrayType(type)) {
                  Report(CustomDiagnosticPushConstantContainsArray,
                         P->getSourceRange(), P->getSourceRange());
                  return false;
                }
                FieldDecl *field_decl = FieldDecl::Create(
                    FD->getASTContext(),
                    Decl::castToDeclContext(clustered_args),
                    P->getSourceRange().getBegin(),
                    P->getSourceRange().getEnd(), P->getIdentifier(),
                    P->getType(), nullptr, nullptr, false, ICIS_NoInit);
                field_decl->setAccess(AS_public);
                clustered_args->addDecl(field_decl);
              } else {
                Layout layout = SSBO;
                if (clspv::Option::PodArgsInUniformBuffer() &&
                    !clspv::Option::Std430UniformBufferLayout())
                  layout = UBO;

                if (!IsSupportedLayout(type, 0, layout, FD->getASTContext(),
                                       P->getSourceRange(),
                                       P->getSourceRange())) {
                  return false;
                }
              }
            }
          }

          if (clustered_args) {
            clustered_args->completeDefinition();
            if (!clustered_args->field_empty()) {
              auto record_type =
                  FD->getASTContext().getRecordType(clustered_args);
              if (!IsSupportedLayout(record_type, 0, SSBO, FD->getASTContext(),
                                     FD->getSourceRange(),
                                     FD->getSourceRange())) {
                return false;
              }

              if (FD->getASTContext()
                      .getTypeSizeInChars(record_type)
                      .getQuantity() > clspv::Option::MaxPushConstantsSize()) {
                Report(CustomDiagnosticPushConstantSizeExceeded,
                       FD->getSourceRange(), FD->getSourceRange());
                return false;
              }
            }
          }

          // Check for unsupported vector types.
          Visitor.TraverseDecl(FD);
        }
      }
    }

    return true;
  }
};
} // namespace

namespace clspv {
std::unique_ptr<ASTConsumer>
ExtraValidationASTAction::CreateASTConsumer(CompilerInstance &CI,
                                            llvm::StringRef InFile) {
  return std::unique_ptr<ASTConsumer>(new ExtraValidationConsumer(CI, InFile));
}
} // namespace clspv