// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vulkan/device.h"

#include <algorithm>
#include <cstring>
#include <iomanip>
#include <iostream>
#include <memory>
#include <set>
#include <sstream>
#include <string>
#include <vector>

#include "src/make_unique.h"

namespace amber {
namespace vulkan {
namespace {

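// Names of the extended (non-core) features a test may require. The dotted
// names mirror the Vulkan feature-struct members they map onto; for example,
// a script requirement such as
//   DEVICE_FEATURE VariablePointerFeatures.variablePointers
// is checked against VkPhysicalDeviceVariablePointerFeaturesKHR below.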
const char kVariablePointers[] = "VariablePointerFeatures.variablePointers";
const char kVariablePointersStorageBuffer[] =
    "VariablePointerFeatures.variablePointersStorageBuffer";
const char kFloat16Int8_Float16[] = "Float16Int8Features.shaderFloat16";
const char kFloat16Int8_Int8[] = "Float16Int8Features.shaderInt8";
const char k8BitStorage_Storage[] =
    "Storage8BitFeatures.storageBuffer8BitAccess";
const char k8BitStorage_UniformAndStorage[] =
    "Storage8BitFeatures.uniformAndStorageBuffer8BitAccess";
const char k8BitStorage_PushConstant[] =
    "Storage8BitFeatures.storagePushConstant8";
const char k16BitStorage_Storage[] =
    "Storage16BitFeatures.storageBuffer16BitAccess";
const char k16BitStorage_UniformAndStorage[] =
    "Storage16BitFeatures.uniformAndStorageBuffer16BitAccess";
const char k16BitStorage_PushConstant[] =
    "Storage16BitFeatures.storagePushConstant16";
const char k16BitStorage_InputOutput[] =
    "Storage16BitFeatures.storageInputOutput16";

const char kSubgroupSizeControl[] = "SubgroupSizeControl.subgroupSizeControl";
const char kComputeFullSubgroups[] = "SubgroupSizeControl.computeFullSubgroups";

const char kSubgroupSupportedOperations[] = "SubgroupSupportedOperations";
const char kSubgroupSupportedOperationsBasic[] =
    "SubgroupSupportedOperations.basic";
const char kSubgroupSupportedOperationsVote[] =
    "SubgroupSupportedOperations.vote";
const char kSubgroupSupportedOperationsArithmetic[] =
    "SubgroupSupportedOperations.arithmetic";
const char kSubgroupSupportedOperationsBallot[] =
    "SubgroupSupportedOperations.ballot";
const char kSubgroupSupportedOperationsShuffle[] =
    "SubgroupSupportedOperations.shuffle";
const char kSubgroupSupportedOperationsShuffleRelative[] =
    "SubgroupSupportedOperations.shuffleRelative";
const char kSubgroupSupportedOperationsClustered[] =
    "SubgroupSupportedOperations.clustered";
const char kSubgroupSupportedOperationsQuad[] =
    "SubgroupSupportedOperations.quad";
const char kSubgroupSupportedStages[] = "SubgroupSupportedStages";
const char kSubgroupSupportedStagesVertex[] = "SubgroupSupportedStages.vertex";
const char kSubgroupSupportedStagesTessellationControl[] =
    "SubgroupSupportedStages.tessellationControl";
const char kSubgroupSupportedStagesTessellationEvaluation[] =
    "SubgroupSupportedStages.tessellationEvaluation";
const char kSubgroupSupportedStagesGeometry[] =
    "SubgroupSupportedStages.geometry";
const char kSubgroupSupportedStagesFragment[] =
    "SubgroupSupportedStages.fragment";
const char kSubgroupSupportedStagesCompute[] =
    "SubgroupSupportedStages.compute";

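// Local stand-in for VkBaseOutStructure, used below to walk the pNext chain
// of VkPhysicalDeviceFeatures2KHR without knowing each node's concrete type.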
struct BaseOutStructure {
  VkStructureType sType;
  void* pNext;
};

bool AreAllRequiredFeaturesSupported(
    const VkPhysicalDeviceFeatures& available_features,
    const std::vector<std::string>& required_features) {
  if (required_features.empty())
    return true;

  for (const auto& feature : required_features) {
    if (feature == "robustBufferAccess") {
      if (available_features.robustBufferAccess == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "fullDrawIndexUint32") {
      if (available_features.fullDrawIndexUint32 == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "imageCubeArray") {
      if (available_features.imageCubeArray == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "independentBlend") {
      if (available_features.independentBlend == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "geometryShader") {
      if (available_features.geometryShader == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "tessellationShader") {
      if (available_features.tessellationShader == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sampleRateShading") {
      if (available_features.sampleRateShading == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "dualSrcBlend") {
      if (available_features.dualSrcBlend == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "logicOp") {
      if (available_features.logicOp == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "multiDrawIndirect") {
      if (available_features.multiDrawIndirect == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "drawIndirectFirstInstance") {
      if (available_features.drawIndirectFirstInstance == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "depthClamp") {
      if (available_features.depthClamp == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "depthBiasClamp") {
      if (available_features.depthBiasClamp == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "fillModeNonSolid") {
      if (available_features.fillModeNonSolid == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "depthBounds") {
      if (available_features.depthBounds == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "wideLines") {
      if (available_features.wideLines == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "largePoints") {
      if (available_features.largePoints == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "alphaToOne") {
      if (available_features.alphaToOne == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "multiViewport") {
      if (available_features.multiViewport == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "samplerAnisotropy") {
      if (available_features.samplerAnisotropy == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "textureCompressionETC2") {
      if (available_features.textureCompressionETC2 == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "textureCompressionASTC_LDR") {
      if (available_features.textureCompressionASTC_LDR == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "textureCompressionBC") {
      if (available_features.textureCompressionBC == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "occlusionQueryPrecise") {
      if (available_features.occlusionQueryPrecise == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "pipelineStatisticsQuery") {
      if (available_features.pipelineStatisticsQuery == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "vertexPipelineStoresAndAtomics") {
      if (available_features.vertexPipelineStoresAndAtomics == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "fragmentStoresAndAtomics") {
      if (available_features.fragmentStoresAndAtomics == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderTessellationAndGeometryPointSize") {
      if (available_features.shaderTessellationAndGeometryPointSize == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderImageGatherExtended") {
      if (available_features.shaderImageGatherExtended == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageImageExtendedFormats") {
      if (available_features.shaderStorageImageExtendedFormats == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageImageMultisample") {
      if (available_features.shaderStorageImageMultisample == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageImageReadWithoutFormat") {
      if (available_features.shaderStorageImageReadWithoutFormat == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageImageWriteWithoutFormat") {
      if (available_features.shaderStorageImageWriteWithoutFormat == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderUniformBufferArrayDynamicIndexing") {
      if (available_features.shaderUniformBufferArrayDynamicIndexing ==
          VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderSampledImageArrayDynamicIndexing") {
      if (available_features.shaderSampledImageArrayDynamicIndexing == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageBufferArrayDynamicIndexing") {
      if (available_features.shaderStorageBufferArrayDynamicIndexing ==
          VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderStorageImageArrayDynamicIndexing") {
      if (available_features.shaderStorageImageArrayDynamicIndexing == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderClipDistance") {
      if (available_features.shaderClipDistance == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderCullDistance") {
      if (available_features.shaderCullDistance == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderFloat64") {
      if (available_features.shaderFloat64 == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderInt64") {
      if (available_features.shaderInt64 == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderInt16") {
      if (available_features.shaderInt16 == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderResourceResidency") {
      if (available_features.shaderResourceResidency == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "shaderResourceMinLod") {
      if (available_features.shaderResourceMinLod == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseBinding") {
      if (available_features.sparseBinding == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidencyBuffer") {
      if (available_features.sparseResidencyBuffer == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidencyImage2D") {
      if (available_features.sparseResidencyImage2D == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidencyImage3D") {
      if (available_features.sparseResidencyImage3D == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidency2Samples") {
      if (available_features.sparseResidency2Samples == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidency4Samples") {
      if (available_features.sparseResidency4Samples == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidency8Samples") {
      if (available_features.sparseResidency8Samples == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidency16Samples") {
      if (available_features.sparseResidency16Samples == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "sparseResidencyAliased") {
      if (available_features.sparseResidencyAliased == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "variableMultisampleRate") {
      if (available_features.variableMultisampleRate == VK_FALSE)
        return false;
      continue;
    }
    if (feature == "inheritedQueries") {
      if (available_features.inheritedQueries == VK_FALSE)
        return false;
      continue;
    }
  }

  return true;
}

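// Returns true only if every name in |required_extensions| also appears in
// |available_extensions|.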
bool AreAllExtensionsSupported(
    const std::vector<std::string>& available_extensions,
    const std::vector<std::string>& required_extensions) {
  if (required_extensions.empty())
    return true;

  std::set<std::string> required_extension_set(required_extensions.begin(),
                                               required_extensions.end());
  for (const auto& extension : available_extensions) {
    required_extension_set.erase(extension);
  }

  return required_extension_set.empty();
}

}  // namespace

Device::Device(VkInstance instance,
               VkPhysicalDevice physical_device,
               uint32_t queue_family_index,
               VkDevice device,
               VkQueue queue)
    : instance_(instance),
      physical_device_(physical_device),
      device_(device),
      queue_(queue),
      queue_family_index_(queue_family_index) {}

Device::~Device() = default;

Result Device::LoadVulkanPointers(PFN_vkGetInstanceProcAddr getInstanceProcAddr,
                                  Delegate* delegate) {
  // Note: logging Vulkan calls is done via the delegate rather than a Vulkan
  // layer because we want such logging even when Amber is built as a native
  // executable on Android, where Vulkan layers are usable only with APKs.
  if (delegate && delegate->LogGraphicsCalls())
    delegate->Log("Loading Vulkan Pointers");

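  // The vk-wrappers-*.inc files are generated at build time; they resolve the
  // Vulkan entry points for their API version via |getInstanceProcAddr| and
  // store them in |ptrs_|.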
#include "vk-wrappers-1-0.inc"

  ptrs_.vkGetPhysicalDeviceProperties(physical_device_,
                                      &physical_device_properties_);

  if (SupportsApiVersion(1, 1, 0)) {
#include "vk-wrappers-1-1.inc"
  }

  return {};
}

bool Device::SupportsApiVersion(uint32_t major,
                                uint32_t minor,
                                uint32_t patch) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wold-style-cast"
  return physical_device_properties_.apiVersion >=
         VK_MAKE_VERSION(major, minor, patch);
#pragma clang diagnostic pop
}

Result Device::Initialize(
    PFN_vkGetInstanceProcAddr getInstanceProcAddr,
    Delegate* delegate,
    const std::vector<std::string>& required_features,
    const std::vector<std::string>& required_device_extensions,
    const VkPhysicalDeviceFeatures& available_features,
    const VkPhysicalDeviceFeatures2KHR& available_features2,
    const std::vector<std::string>& available_extensions) {
  Result r = LoadVulkanPointers(getInstanceProcAddr, delegate);
  if (!r.IsSuccess())
    return r;

  // Check for the core features. We don't know if available_features or
  // available_features2 is provided, so check both.
  if (!AreAllRequiredFeaturesSupported(available_features, required_features) &&
      !AreAllRequiredFeaturesSupported(available_features2.features,
                                       required_features)) {
    return Result(
        "Vulkan: Device::Initialize given physical device does not support "
        "required features");
  }

  // Search for additional features, in case they are found in the pNext chain
  // of available_features2.
  VkPhysicalDeviceVariablePointerFeaturesKHR* var_ptrs = nullptr;
  VkPhysicalDeviceFloat16Int8FeaturesKHR* float16_ptrs = nullptr;
  VkPhysicalDevice8BitStorageFeaturesKHR* storage8_ptrs = nullptr;
  VkPhysicalDevice16BitStorageFeaturesKHR* storage16_ptrs = nullptr;
  VkPhysicalDeviceVulkan11Features* vulkan11_ptrs = nullptr;
  VkPhysicalDeviceVulkan12Features* vulkan12_ptrs = nullptr;
  VkPhysicalDeviceVulkan13Features* vulkan13_ptrs = nullptr;
  VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*
      subgroup_size_control_features = nullptr;
  void* ptr = available_features2.pNext;
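  // Walk the pNext chain and remember any feature struct we recognize so the
  // per-feature checks below can consult it.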
  while (ptr != nullptr) {
    BaseOutStructure* s = static_cast<BaseOutStructure*>(ptr);
    switch (s->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR:
        var_ptrs =
            static_cast<VkPhysicalDeviceVariablePointerFeaturesKHR*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR:
        float16_ptrs =
            static_cast<VkPhysicalDeviceFloat16Int8FeaturesKHR*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR:
        storage8_ptrs =
            static_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR:
        storage16_ptrs =
            static_cast<VkPhysicalDevice16BitStorageFeaturesKHR*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT:
        subgroup_size_control_features =
            static_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
        vulkan11_ptrs = static_cast<VkPhysicalDeviceVulkan11Features*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
        vulkan12_ptrs = static_cast<VkPhysicalDeviceVulkan12Features*>(ptr);
        break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES:
        vulkan13_ptrs = static_cast<VkPhysicalDeviceVulkan13Features*>(ptr);
        break;
      default:
        break;
    }
    ptr = s->pNext;
  }

  // Compare the available additional (non-core) features against the
  // requirements.
  //
  // Vulkan 1.2 added support for defining non-core physical device features
  // using VkPhysicalDeviceVulkan11Features and VkPhysicalDeviceVulkan12Features
  // structures. If |vulkan11_ptrs| and/or |vulkan12_ptrs| are null, we must
  // check for features using the old approach (by checking across various
  // feature structs); otherwise, we can check features via the new structs.
  for (const auto& feature : required_features) {
    // First check the feature structures are provided for the required
    // features.
    if ((feature == kVariablePointers ||
         feature == kVariablePointersStorageBuffer) &&
        var_ptrs == nullptr && vulkan11_ptrs == nullptr) {
      return amber::Result(
          "Variable pointers requested but feature not returned");
    }
    if ((feature == k16BitStorage_Storage ||
         feature == k16BitStorage_UniformAndStorage ||
         feature == k16BitStorage_PushConstant ||
         feature == k16BitStorage_InputOutput) &&
        storage16_ptrs == nullptr && vulkan11_ptrs == nullptr) {
      return amber::Result(
          "Shader 16-bit storage requested but feature not returned");
    }
    if ((feature == kFloat16Int8_Float16 || feature == kFloat16Int8_Int8) &&
        float16_ptrs == nullptr && vulkan12_ptrs == nullptr) {
      return amber::Result(
          "Shader float16/int8 requested but feature not returned");
    }
    if ((feature == k8BitStorage_UniformAndStorage ||
         feature == k8BitStorage_Storage ||
         feature == k8BitStorage_PushConstant) &&
        storage8_ptrs == nullptr && vulkan12_ptrs == nullptr) {
      return amber::Result(
          "Shader 8-bit storage requested but feature not returned");
    }
    if ((feature == kSubgroupSizeControl || feature == kComputeFullSubgroups) &&
        subgroup_size_control_features == nullptr && vulkan13_ptrs == nullptr) {
      return amber::Result("Missing subgroup size control features");
    }

    // Next check the fields of the feature structures.

    // If the Vulkan 1.1 structure exists, the features are set there.
    if (vulkan11_ptrs) {
      if (feature == kVariablePointers &&
          vulkan11_ptrs->variablePointers != VK_TRUE) {
        return amber::Result("Missing variable pointers feature");
      }
      if (feature == kVariablePointersStorageBuffer &&
          vulkan11_ptrs->variablePointersStorageBuffer != VK_TRUE) {
        return amber::Result(
            "Missing variable pointers storage buffer feature");
      }
      if (feature == k16BitStorage_Storage &&
          vulkan11_ptrs->storageBuffer16BitAccess != VK_TRUE) {
        return amber::Result("Missing 16-bit storage access");
      }
      if (feature == k16BitStorage_UniformAndStorage &&
          vulkan11_ptrs->uniformAndStorageBuffer16BitAccess != VK_TRUE) {
        return amber::Result("Missing 16-bit uniform and storage access");
      }
      if (feature == k16BitStorage_PushConstant &&
          vulkan11_ptrs->storagePushConstant16 != VK_TRUE) {
        return amber::Result("Missing 16-bit push constant access");
      }
      if (feature == k16BitStorage_InputOutput &&
          vulkan11_ptrs->storageInputOutput16 != VK_TRUE) {
        return amber::Result("Missing 16-bit input/output access");
      }
    } else {
      // The Vulkan 1.1 structure was not found, so use the separate structure
      // for each feature.
      if (feature == kVariablePointers &&
          var_ptrs->variablePointers != VK_TRUE) {
        return amber::Result("Missing variable pointers feature");
      }
      if (feature == kVariablePointersStorageBuffer &&
          var_ptrs->variablePointersStorageBuffer != VK_TRUE) {
        return amber::Result(
            "Missing variable pointers storage buffer feature");
      }
      if (feature == k16BitStorage_Storage &&
          storage16_ptrs->storageBuffer16BitAccess != VK_TRUE) {
        return amber::Result("Missing 16-bit storage access");
      }
      if (feature == k16BitStorage_UniformAndStorage &&
          storage16_ptrs->uniformAndStorageBuffer16BitAccess != VK_TRUE) {
        return amber::Result("Missing 16-bit uniform and storage access");
      }
      if (feature == k16BitStorage_PushConstant &&
          storage16_ptrs->storagePushConstant16 != VK_TRUE) {
        return amber::Result("Missing 16-bit push constant access");
      }
      if (feature == k16BitStorage_InputOutput &&
          storage16_ptrs->storageInputOutput16 != VK_TRUE) {
        return amber::Result("Missing 16-bit input/output access");
      }
    }

    // If the Vulkan 1.2 structure exists, the features are set there.
    if (vulkan12_ptrs) {
      if (feature == kFloat16Int8_Float16 &&
          vulkan12_ptrs->shaderFloat16 != VK_TRUE) {
        return amber::Result("Missing float16 feature");
      }
      if (feature == kFloat16Int8_Int8 &&
          vulkan12_ptrs->shaderInt8 != VK_TRUE) {
        return amber::Result("Missing int8 feature");
      }
      if (feature == k8BitStorage_Storage &&
          vulkan12_ptrs->storageBuffer8BitAccess != VK_TRUE) {
        return amber::Result("Missing 8-bit storage access");
      }
      if (feature == k8BitStorage_UniformAndStorage &&
          vulkan12_ptrs->uniformAndStorageBuffer8BitAccess != VK_TRUE) {
        return amber::Result("Missing 8-bit uniform and storage access");
      }
      if (feature == k8BitStorage_PushConstant &&
          vulkan12_ptrs->storagePushConstant8 != VK_TRUE) {
        return amber::Result("Missing 8-bit push constant access");
      }
    } else {
      // The Vulkan 1.2 structure was not found, so use the separate structure
      // for each feature.
      if (feature == kFloat16Int8_Float16 &&
          float16_ptrs->shaderFloat16 != VK_TRUE) {
        return amber::Result("Missing float16 feature");
      }
      if (feature == kFloat16Int8_Int8 && float16_ptrs->shaderInt8 != VK_TRUE) {
        return amber::Result("Missing int8 feature");
      }
      if (feature == k8BitStorage_Storage &&
          storage8_ptrs->storageBuffer8BitAccess != VK_TRUE) {
        return amber::Result("Missing 8-bit storage access");
      }
      if (feature == k8BitStorage_UniformAndStorage &&
          storage8_ptrs->uniformAndStorageBuffer8BitAccess != VK_TRUE) {
        return amber::Result("Missing 8-bit uniform and storage access");
      }
      if (feature == k8BitStorage_PushConstant &&
          storage8_ptrs->storagePushConstant8 != VK_TRUE) {
        return amber::Result("Missing 8-bit push constant access");
      }
    }

    // If the Vulkan 1.3 structure exists, the features are set there.
    if (vulkan13_ptrs) {
      if (feature == kSubgroupSizeControl &&
          vulkan13_ptrs->subgroupSizeControl != VK_TRUE) {
        return amber::Result("Missing subgroup size control feature");
      }
      if (feature == kComputeFullSubgroups &&
          vulkan13_ptrs->computeFullSubgroups != VK_TRUE) {
        return amber::Result("Missing compute full subgroups feature");
      }
    } else {
      if (feature == kSubgroupSizeControl &&
          subgroup_size_control_features->subgroupSizeControl != VK_TRUE) {
        return amber::Result("Missing subgroup size control feature");
      }
      if (feature == kComputeFullSubgroups &&
          subgroup_size_control_features->computeFullSubgroups != VK_TRUE) {
        return amber::Result("Missing compute full subgroups feature");
      }
    }
  }

  if (!AreAllExtensionsSupported(available_extensions,
                                 required_device_extensions)) {
    return Result(
        "Vulkan: Device::Initialize given physical device does not support "
        "required extensions");
  }

  ptrs_.vkGetPhysicalDeviceMemoryProperties(physical_device_,
                                            &physical_memory_properties_);

  subgroup_size_control_properties_ = {};
  const bool needs_subgroup_size_control =
      std::find(required_features.begin(), required_features.end(),
                kSubgroupSizeControl) != required_features.end();

  bool needs_subgroup_supported_operations = false;
  bool needs_subgroup_supported_stages = false;

  // Search for subgroup supported operations requirements.
  for (const auto& feature : required_features)
    if (feature.find(kSubgroupSupportedOperations) != std::string::npos)
      needs_subgroup_supported_operations = true;

  // Search for subgroup supported stages requirements.
  for (const auto& feature : required_features)
    if (feature.find(kSubgroupSupportedStages) != std::string::npos)
      needs_subgroup_supported_stages = true;

  const bool needs_subgroup_properties =
      needs_subgroup_supported_operations || needs_subgroup_supported_stages;

  if (needs_subgroup_size_control || needs_subgroup_properties) {
    // Always chain all physical device properties structs in case at least one
    // of them is needed.
    VkPhysicalDeviceProperties2 properties2 = {};
    VkPhysicalDeviceSubgroupProperties subgroup_properties = {};
    VkPhysicalDeviceVulkan11Properties vulkan11_properties = {};
    VkSubgroupFeatureFlags subgroup_supported_operations;
    VkShaderStageFlags subgroup_supported_stages;
    properties2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    properties2.pNext = &subgroup_size_control_properties_;
    subgroup_size_control_properties_.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT;
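    // On a Vulkan 1.2+ device the subgroup limits are reported via
    // VkPhysicalDeviceVulkan11Properties; on a 1.1 device they come from
    // VkPhysicalDeviceSubgroupProperties instead, so chain whichever applies.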
    if (SupportsApiVersion(1, 2, 0)) {
      subgroup_size_control_properties_.pNext = &vulkan11_properties;
      vulkan11_properties.sType =
          VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
    } else {
      subgroup_size_control_properties_.pNext = &subgroup_properties;
      subgroup_properties.sType =
          VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
    }

    if (needs_subgroup_size_control && !SupportsApiVersion(1, 1, 0)) {
      return Result(
          "Vulkan: Device::Initialize subgroup size control feature also "
          "requires an API version of 1.1 or higher");
    }
    if (needs_subgroup_properties && !SupportsApiVersion(1, 1, 0)) {
      return Result(
          "Vulkan: Device::Initialize subgroup properties also "
          "requires an API version of 1.1 or higher");
    }
    ptrs_.vkGetPhysicalDeviceProperties2(physical_device_, &properties2);

    if (needs_subgroup_supported_operations) {
      // Read supported subgroup operations from the correct struct depending on
      // the device API
      if (SupportsApiVersion(1, 2, 0)) {
        subgroup_supported_operations =
            vulkan11_properties.subgroupSupportedOperations;
      } else {
        subgroup_supported_operations = subgroup_properties.supportedOperations;
      }

      for (const auto& feature : required_features) {
        if (feature == kSubgroupSupportedOperationsBasic &&
            !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_BASIC_BIT)) {
          return amber::Result("Missing subgroup operation basic feature");
        }
        if (feature == kSubgroupSupportedOperationsVote &&
            !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_VOTE_BIT)) {
          return amber::Result("Missing subgroup operation vote feature");
        }
        if (feature == kSubgroupSupportedOperationsArithmetic &&
            !(subgroup_supported_operations &
              VK_SUBGROUP_FEATURE_ARITHMETIC_BIT)) {
          return amber::Result("Missing subgroup operation arithmetic feature");
        }
        if (feature == kSubgroupSupportedOperationsBallot &&
            !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_BALLOT_BIT)) {
          return amber::Result("Missing subgroup operation ballot feature");
        }
        if (feature == kSubgroupSupportedOperationsShuffle &&
            !(subgroup_supported_operations &
              VK_SUBGROUP_FEATURE_SHUFFLE_BIT)) {
          return amber::Result("Missing subgroup operation shuffle feature");
        }
        if (feature == kSubgroupSupportedOperationsShuffleRelative &&
            !(subgroup_supported_operations &
              VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT)) {
          return amber::Result(
              "Missing subgroup operation shuffle relative feature");
        }
        if (feature == kSubgroupSupportedOperationsClustered &&
            !(subgroup_supported_operations &
              VK_SUBGROUP_FEATURE_CLUSTERED_BIT)) {
          return amber::Result("Missing subgroup operation clustered feature");
        }
        if (feature == kSubgroupSupportedOperationsQuad &&
            !(subgroup_supported_operations & VK_SUBGROUP_FEATURE_QUAD_BIT)) {
          return amber::Result("Missing subgroup operation quad feature");
        }
      }
    }

    if (needs_subgroup_supported_stages) {
      // Read supported subgroup stages from the correct struct depending on the
      // device API
      if (SupportsApiVersion(1, 2, 0)) {
        subgroup_supported_stages = vulkan11_properties.subgroupSupportedStages;
      } else {
        subgroup_supported_stages = subgroup_properties.supportedStages;
      }

      for (const auto& feature : required_features) {
        if (feature == kSubgroupSupportedStagesVertex &&
            !(subgroup_supported_stages & VK_SHADER_STAGE_VERTEX_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for vertex shader stage");
        }
        if (feature == kSubgroupSupportedStagesTessellationControl &&
            !(subgroup_supported_stages &
              VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for tessellation control "
              "shader stage");
        }
        if (feature == kSubgroupSupportedStagesTessellationEvaluation &&
            !(subgroup_supported_stages &
              VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for tessellation evaluation "
              "shader stage");
        }
        if (feature == kSubgroupSupportedStagesGeometry &&
            !(subgroup_supported_stages & VK_SHADER_STAGE_GEOMETRY_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for geometry shader stage");
        }
        if (feature == kSubgroupSupportedStagesFragment &&
            !(subgroup_supported_stages & VK_SHADER_STAGE_FRAGMENT_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for fragment shader stage");
        }
        if (feature == kSubgroupSupportedStagesCompute &&
            !(subgroup_supported_stages & VK_SHADER_STAGE_COMPUTE_BIT)) {
          return amber::Result(
              "Subgroup operations not supported for compute shader stage");
        }
      }
    }
  }

  return {};
}

bool Device::IsFormatSupportedByPhysicalDevice(const Format& format,
                                               BufferType type) {
  VkFormat vk_format = GetVkFormat(format);
  VkFormatProperties properties = VkFormatProperties();
  GetPtrs()->vkGetPhysicalDeviceFormatProperties(physical_device_, vk_format,
                                                 &properties);

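  // Pick the format-feature bit this buffer type needs and note whether to
  // check it against the format's image (optimal tiling) features or its
  // buffer features.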
  VkFormatFeatureFlagBits flag = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
  bool is_buffer_type_image = false;
  switch (type) {
    case BufferType::kColor:
    case BufferType::kResolve:
    case BufferType::kStorageImage:
      flag = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
      is_buffer_type_image = true;
      break;
    case BufferType::kDepthStencil:
      flag = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
      is_buffer_type_image = true;
      break;
    case BufferType::kSampledImage:
    case BufferType::kCombinedImageSampler:
      flag = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
      is_buffer_type_image = true;
      break;
    case BufferType::kVertex:
      flag = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
      is_buffer_type_image = false;
      break;
    default:
      return false;
  }

  return ((is_buffer_type_image ? properties.optimalTilingFeatures
                                : properties.bufferFeatures) &
          flag) == flag;
}

bool Device::HasMemoryFlags(uint32_t memory_type_index,
                            const VkMemoryPropertyFlags flags) const {
  return (physical_memory_properties_.memoryTypes[memory_type_index]
              .propertyFlags &
          flags) == flags;
}

bool Device::IsMemoryHostAccessible(uint32_t memory_type_index) const {
  return HasMemoryFlags(memory_type_index, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
}

bool Device::IsMemoryHostCoherent(uint32_t memory_type_index) const {
  return HasMemoryFlags(memory_type_index,
                        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
}

uint32_t Device::GetMaxPushConstants() const {
  return physical_device_properties_.limits.maxPushConstantsSize;
}

bool Device::IsDescriptorSetInBounds(uint32_t descriptor_set) const {
  VkPhysicalDeviceProperties properties = VkPhysicalDeviceProperties();
  GetPtrs()->vkGetPhysicalDeviceProperties(physical_device_, &properties);
  return properties.limits.maxBoundDescriptorSets > descriptor_set;
}

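// Maps Amber's FormatType enum onto the equivalent VkFormat value, returning
// VK_FORMAT_UNDEFINED for unknown formats.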
VkFormat Device::GetVkFormat(const Format& format) const {
  VkFormat ret = VK_FORMAT_UNDEFINED;
  switch (format.GetFormatType()) {
    case FormatType::kUnknown:
      ret = VK_FORMAT_UNDEFINED;
      break;
    case FormatType::kA1R5G5B5_UNORM_PACK16:
      ret = VK_FORMAT_A1R5G5B5_UNORM_PACK16;
      break;
    case FormatType::kA2B10G10R10_SINT_PACK32:
      ret = VK_FORMAT_A2B10G10R10_SINT_PACK32;
      break;
    case FormatType::kA2B10G10R10_SNORM_PACK32:
      ret = VK_FORMAT_A2B10G10R10_SNORM_PACK32;
      break;
    case FormatType::kA2B10G10R10_SSCALED_PACK32:
      ret = VK_FORMAT_A2B10G10R10_SSCALED_PACK32;
      break;
    case FormatType::kA2B10G10R10_UINT_PACK32:
      ret = VK_FORMAT_A2B10G10R10_UINT_PACK32;
      break;
    case FormatType::kA2B10G10R10_UNORM_PACK32:
      ret = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
      break;
    case FormatType::kA2B10G10R10_USCALED_PACK32:
      ret = VK_FORMAT_A2B10G10R10_USCALED_PACK32;
      break;
    case FormatType::kA2R10G10B10_SINT_PACK32:
      ret = VK_FORMAT_A2R10G10B10_SINT_PACK32;
      break;
    case FormatType::kA2R10G10B10_SNORM_PACK32:
      ret = VK_FORMAT_A2R10G10B10_SNORM_PACK32;
      break;
    case FormatType::kA2R10G10B10_SSCALED_PACK32:
      ret = VK_FORMAT_A2R10G10B10_SSCALED_PACK32;
      break;
    case FormatType::kA2R10G10B10_UINT_PACK32:
      ret = VK_FORMAT_A2R10G10B10_UINT_PACK32;
      break;
    case FormatType::kA2R10G10B10_UNORM_PACK32:
      ret = VK_FORMAT_A2R10G10B10_UNORM_PACK32;
      break;
    case FormatType::kA2R10G10B10_USCALED_PACK32:
      ret = VK_FORMAT_A2R10G10B10_USCALED_PACK32;
      break;
    case FormatType::kA8B8G8R8_SINT_PACK32:
      ret = VK_FORMAT_A8B8G8R8_SINT_PACK32;
      break;
    case FormatType::kA8B8G8R8_SNORM_PACK32:
      ret = VK_FORMAT_A8B8G8R8_SNORM_PACK32;
      break;
    case FormatType::kA8B8G8R8_SRGB_PACK32:
      ret = VK_FORMAT_A8B8G8R8_SRGB_PACK32;
      break;
    case FormatType::kA8B8G8R8_SSCALED_PACK32:
      ret = VK_FORMAT_A8B8G8R8_SSCALED_PACK32;
      break;
    case FormatType::kA8B8G8R8_UINT_PACK32:
      ret = VK_FORMAT_A8B8G8R8_UINT_PACK32;
      break;
    case FormatType::kA8B8G8R8_UNORM_PACK32:
      ret = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
      break;
    case FormatType::kA8B8G8R8_USCALED_PACK32:
      ret = VK_FORMAT_A8B8G8R8_USCALED_PACK32;
      break;
    case FormatType::kB10G11R11_UFLOAT_PACK32:
      ret = VK_FORMAT_B10G11R11_UFLOAT_PACK32;
      break;
    case FormatType::kB4G4R4A4_UNORM_PACK16:
      ret = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
      break;
    case FormatType::kB5G5R5A1_UNORM_PACK16:
      ret = VK_FORMAT_B5G5R5A1_UNORM_PACK16;
      break;
    case FormatType::kB5G6R5_UNORM_PACK16:
      ret = VK_FORMAT_B5G6R5_UNORM_PACK16;
      break;
    case FormatType::kB8G8R8A8_SINT:
      ret = VK_FORMAT_B8G8R8A8_SINT;
      break;
    case FormatType::kB8G8R8A8_SNORM:
      ret = VK_FORMAT_B8G8R8A8_SNORM;
      break;
    case FormatType::kB8G8R8A8_SRGB:
      ret = VK_FORMAT_B8G8R8A8_SRGB;
      break;
    case FormatType::kB8G8R8A8_SSCALED:
      ret = VK_FORMAT_B8G8R8A8_SSCALED;
      break;
    case FormatType::kB8G8R8A8_UINT:
      ret = VK_FORMAT_B8G8R8A8_UINT;
      break;
    case FormatType::kB8G8R8A8_UNORM:
      ret = VK_FORMAT_B8G8R8A8_UNORM;
      break;
    case FormatType::kB8G8R8A8_USCALED:
      ret = VK_FORMAT_B8G8R8A8_USCALED;
      break;
    case FormatType::kB8G8R8_SINT:
      ret = VK_FORMAT_B8G8R8_SINT;
      break;
    case FormatType::kB8G8R8_SNORM:
      ret = VK_FORMAT_B8G8R8_SNORM;
      break;
    case FormatType::kB8G8R8_SRGB:
      ret = VK_FORMAT_B8G8R8_SRGB;
      break;
    case FormatType::kB8G8R8_SSCALED:
      ret = VK_FORMAT_B8G8R8_SSCALED;
      break;
    case FormatType::kB8G8R8_UINT:
      ret = VK_FORMAT_B8G8R8_UINT;
      break;
    case FormatType::kB8G8R8_UNORM:
      ret = VK_FORMAT_B8G8R8_UNORM;
      break;
    case FormatType::kB8G8R8_USCALED:
      ret = VK_FORMAT_B8G8R8_USCALED;
      break;
    case FormatType::kD16_UNORM:
      ret = VK_FORMAT_D16_UNORM;
      break;
    case FormatType::kD16_UNORM_S8_UINT:
      ret = VK_FORMAT_D16_UNORM_S8_UINT;
      break;
    case FormatType::kD24_UNORM_S8_UINT:
      ret = VK_FORMAT_D24_UNORM_S8_UINT;
      break;
    case FormatType::kD32_SFLOAT:
      ret = VK_FORMAT_D32_SFLOAT;
      break;
    case FormatType::kD32_SFLOAT_S8_UINT:
      ret = VK_FORMAT_D32_SFLOAT_S8_UINT;
      break;
    case FormatType::kR16G16B16A16_SFLOAT:
      ret = VK_FORMAT_R16G16B16A16_SFLOAT;
      break;
    case FormatType::kR16G16B16A16_SINT:
      ret = VK_FORMAT_R16G16B16A16_SINT;
      break;
    case FormatType::kR16G16B16A16_SNORM:
      ret = VK_FORMAT_R16G16B16A16_SNORM;
      break;
    case FormatType::kR16G16B16A16_SSCALED:
      ret = VK_FORMAT_R16G16B16A16_SSCALED;
      break;
    case FormatType::kR16G16B16A16_UINT:
      ret = VK_FORMAT_R16G16B16A16_UINT;
      break;
    case FormatType::kR16G16B16A16_UNORM:
      ret = VK_FORMAT_R16G16B16A16_UNORM;
      break;
    case FormatType::kR16G16B16A16_USCALED:
      ret = VK_FORMAT_R16G16B16A16_USCALED;
      break;
    case FormatType::kR16G16B16_SFLOAT:
      ret = VK_FORMAT_R16G16B16_SFLOAT;
      break;
    case FormatType::kR16G16B16_SINT:
      ret = VK_FORMAT_R16G16B16_SINT;
      break;
    case FormatType::kR16G16B16_SNORM:
      ret = VK_FORMAT_R16G16B16_SNORM;
      break;
    case FormatType::kR16G16B16_SSCALED:
      ret = VK_FORMAT_R16G16B16_SSCALED;
      break;
    case FormatType::kR16G16B16_UINT:
      ret = VK_FORMAT_R16G16B16_UINT;
      break;
    case FormatType::kR16G16B16_UNORM:
      ret = VK_FORMAT_R16G16B16_UNORM;
      break;
    case FormatType::kR16G16B16_USCALED:
      ret = VK_FORMAT_R16G16B16_USCALED;
      break;
    case FormatType::kR16G16_SFLOAT:
      ret = VK_FORMAT_R16G16_SFLOAT;
      break;
    case FormatType::kR16G16_SINT:
      ret = VK_FORMAT_R16G16_SINT;
      break;
    case FormatType::kR16G16_SNORM:
      ret = VK_FORMAT_R16G16_SNORM;
      break;
    case FormatType::kR16G16_SSCALED:
      ret = VK_FORMAT_R16G16_SSCALED;
      break;
    case FormatType::kR16G16_UINT:
      ret = VK_FORMAT_R16G16_UINT;
      break;
    case FormatType::kR16G16_UNORM:
      ret = VK_FORMAT_R16G16_UNORM;
      break;
    case FormatType::kR16G16_USCALED:
      ret = VK_FORMAT_R16G16_USCALED;
      break;
    case FormatType::kR16_SFLOAT:
      ret = VK_FORMAT_R16_SFLOAT;
      break;
    case FormatType::kR16_SINT:
      ret = VK_FORMAT_R16_SINT;
      break;
    case FormatType::kR16_SNORM:
      ret = VK_FORMAT_R16_SNORM;
      break;
    case FormatType::kR16_SSCALED:
      ret = VK_FORMAT_R16_SSCALED;
      break;
    case FormatType::kR16_UINT:
      ret = VK_FORMAT_R16_UINT;
      break;
    case FormatType::kR16_UNORM:
      ret = VK_FORMAT_R16_UNORM;
      break;
    case FormatType::kR16_USCALED:
      ret = VK_FORMAT_R16_USCALED;
      break;
    case FormatType::kR32G32B32A32_SFLOAT:
      ret = VK_FORMAT_R32G32B32A32_SFLOAT;
      break;
    case FormatType::kR32G32B32A32_SINT:
      ret = VK_FORMAT_R32G32B32A32_SINT;
      break;
    case FormatType::kR32G32B32A32_UINT:
      ret = VK_FORMAT_R32G32B32A32_UINT;
      break;
    case FormatType::kR32G32B32_SFLOAT:
      ret = VK_FORMAT_R32G32B32_SFLOAT;
      break;
    case FormatType::kR32G32B32_SINT:
      ret = VK_FORMAT_R32G32B32_SINT;
      break;
    case FormatType::kR32G32B32_UINT:
      ret = VK_FORMAT_R32G32B32_UINT;
      break;
    case FormatType::kR32G32_SFLOAT:
      ret = VK_FORMAT_R32G32_SFLOAT;
      break;
    case FormatType::kR32G32_SINT:
      ret = VK_FORMAT_R32G32_SINT;
      break;
    case FormatType::kR32G32_UINT:
      ret = VK_FORMAT_R32G32_UINT;
      break;
    case FormatType::kR32_SFLOAT:
      ret = VK_FORMAT_R32_SFLOAT;
      break;
    case FormatType::kR32_SINT:
      ret = VK_FORMAT_R32_SINT;
      break;
    case FormatType::kR32_UINT:
      ret = VK_FORMAT_R32_UINT;
      break;
    case FormatType::kR4G4B4A4_UNORM_PACK16:
      ret = VK_FORMAT_R4G4B4A4_UNORM_PACK16;
      break;
    case FormatType::kR4G4_UNORM_PACK8:
      ret = VK_FORMAT_R4G4_UNORM_PACK8;
      break;
    case FormatType::kR5G5B5A1_UNORM_PACK16:
      ret = VK_FORMAT_R5G5B5A1_UNORM_PACK16;
      break;
    case FormatType::kR5G6B5_UNORM_PACK16:
      ret = VK_FORMAT_R5G6B5_UNORM_PACK16;
      break;
    case FormatType::kR64G64B64A64_SFLOAT:
      ret = VK_FORMAT_R64G64B64A64_SFLOAT;
      break;
    case FormatType::kR64G64B64A64_SINT:
      ret = VK_FORMAT_R64G64B64A64_SINT;
      break;
    case FormatType::kR64G64B64A64_UINT:
      ret = VK_FORMAT_R64G64B64A64_UINT;
      break;
    case FormatType::kR64G64B64_SFLOAT:
      ret = VK_FORMAT_R64G64B64_SFLOAT;
      break;
    case FormatType::kR64G64B64_SINT:
      ret = VK_FORMAT_R64G64B64_SINT;
      break;
    case FormatType::kR64G64B64_UINT:
      ret = VK_FORMAT_R64G64B64_UINT;
      break;
    case FormatType::kR64G64_SFLOAT:
      ret = VK_FORMAT_R64G64_SFLOAT;
      break;
    case FormatType::kR64G64_SINT:
      ret = VK_FORMAT_R64G64_SINT;
      break;
    case FormatType::kR64G64_UINT:
      ret = VK_FORMAT_R64G64_UINT;
      break;
    case FormatType::kR64_SFLOAT:
      ret = VK_FORMAT_R64_SFLOAT;
      break;
    case FormatType::kR64_SINT:
      ret = VK_FORMAT_R64_SINT;
      break;
    case FormatType::kR64_UINT:
      ret = VK_FORMAT_R64_UINT;
      break;
    case FormatType::kR8G8B8A8_SINT:
      ret = VK_FORMAT_R8G8B8A8_SINT;
      break;
    case FormatType::kR8G8B8A8_SNORM:
      ret = VK_FORMAT_R8G8B8A8_SNORM;
      break;
    case FormatType::kR8G8B8A8_SRGB:
      ret = VK_FORMAT_R8G8B8A8_SRGB;
      break;
    case FormatType::kR8G8B8A8_SSCALED:
      ret = VK_FORMAT_R8G8B8A8_SSCALED;
      break;
    case FormatType::kR8G8B8A8_UINT:
      ret = VK_FORMAT_R8G8B8A8_UINT;
      break;
    case FormatType::kR8G8B8A8_UNORM:
      ret = VK_FORMAT_R8G8B8A8_UNORM;
      break;
    case FormatType::kR8G8B8A8_USCALED:
      ret = VK_FORMAT_R8G8B8A8_USCALED;
      break;
    case FormatType::kR8G8B8_SINT:
      ret = VK_FORMAT_R8G8B8_SINT;
      break;
    case FormatType::kR8G8B8_SNORM:
      ret = VK_FORMAT_R8G8B8_SNORM;
      break;
    case FormatType::kR8G8B8_SRGB:
      ret = VK_FORMAT_R8G8B8_SRGB;
      break;
    case FormatType::kR8G8B8_SSCALED:
      ret = VK_FORMAT_R8G8B8_SSCALED;
      break;
    case FormatType::kR8G8B8_UINT:
      ret = VK_FORMAT_R8G8B8_UINT;
      break;
    case FormatType::kR8G8B8_UNORM:
      ret = VK_FORMAT_R8G8B8_UNORM;
      break;
    case FormatType::kR8G8B8_USCALED:
      ret = VK_FORMAT_R8G8B8_USCALED;
      break;
    case FormatType::kR8G8_SINT:
      ret = VK_FORMAT_R8G8_SINT;
      break;
    case FormatType::kR8G8_SNORM:
      ret = VK_FORMAT_R8G8_SNORM;
      break;
    case FormatType::kR8G8_SRGB:
      ret = VK_FORMAT_R8G8_SRGB;
      break;
    case FormatType::kR8G8_SSCALED:
      ret = VK_FORMAT_R8G8_SSCALED;
      break;
    case FormatType::kR8G8_UINT:
      ret = VK_FORMAT_R8G8_UINT;
      break;
    case FormatType::kR8G8_UNORM:
      ret = VK_FORMAT_R8G8_UNORM;
      break;
    case FormatType::kR8G8_USCALED:
      ret = VK_FORMAT_R8G8_USCALED;
      break;
    case FormatType::kR8_SINT:
      ret = VK_FORMAT_R8_SINT;
      break;
    case FormatType::kR8_SNORM:
      ret = VK_FORMAT_R8_SNORM;
      break;
    case FormatType::kR8_SRGB:
      ret = VK_FORMAT_R8_SRGB;
      break;
    case FormatType::kR8_SSCALED:
      ret = VK_FORMAT_R8_SSCALED;
      break;
    case FormatType::kR8_UINT:
      ret = VK_FORMAT_R8_UINT;
      break;
    case FormatType::kR8_UNORM:
      ret = VK_FORMAT_R8_UNORM;
      break;
    case FormatType::kR8_USCALED:
      ret = VK_FORMAT_R8_USCALED;
      break;
    case FormatType::kS8_UINT:
      ret = VK_FORMAT_S8_UINT;
      break;
    case FormatType::kX8_D24_UNORM_PACK32:
      ret = VK_FORMAT_X8_D24_UNORM_PACK32;
      break;
  }
  return ret;
}

bool Device::IsRequiredSubgroupSizeSupported(
    const ShaderType type,
    const uint32_t required_subgroup_size) const {
  VkShaderStageFlagBits stage = {};
  switch (type) {
    case kShaderTypeGeometry:
      stage = VK_SHADER_STAGE_GEOMETRY_BIT;
      break;
    case kShaderTypeFragment:
      stage = VK_SHADER_STAGE_FRAGMENT_BIT;
      break;
    case kShaderTypeVertex:
      stage = VK_SHADER_STAGE_VERTEX_BIT;
      break;
    case kShaderTypeTessellationControl:
      stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
      break;
    case kShaderTypeTessellationEvaluation:
      stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
      break;
    case kShaderTypeCompute:
      stage = VK_SHADER_STAGE_COMPUTE_BIT;
      break;
    default:
      return false;
  }
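  // The stage must allow an explicit required subgroup size, and the requested
  // size must lie within the device's supported [min, max] range.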
  if ((stage & subgroup_size_control_properties_.requiredSubgroupSizeStages) ==
      0) {
    return false;
  }
  if (required_subgroup_size == 0 ||
      required_subgroup_size <
          subgroup_size_control_properties_.minSubgroupSize ||
      required_subgroup_size >
          subgroup_size_control_properties_.maxSubgroupSize) {
    return false;
  }

  return true;
}

uint32_t Device::GetMinSubgroupSize() const {
  return subgroup_size_control_properties_.minSubgroupSize;
}

uint32_t Device::GetMaxSubgroupSize() const {
  return subgroup_size_control_properties_.maxSubgroupSize;
}

}  // namespace vulkan
}  // namespace amber